def collect(self):
    """Normalize the flow plugin argument, run the flow, yield statuses.

    The flow may be provided as a JSON string, as a primitive dict, or
    as an already constructed agent.Flow object.

    Yields:
      Flow status objects as the flow progresses. Each status is also
      reported back to the server through the flow's ticket.

    Raises:
      plugin.PluginError: If the flow argument is of an unsupported type.
    """
    self.flow = self.plugin_args.flow
    if isinstance(self.flow, basestring):
        # The flow arrived as a JSON encoded string (e.g. command line).
        self.flow = serializer.unserialize(
            json.loads(self.flow), session=self.session,
            # Allow for future addition of fields - consistent with all
            # other unserialize call sites in this codebase.
            strict_parsing=False)
    elif isinstance(self.flow, dict):
        self.flow = serializer.unserialize(
            self.flow, session=self.session, strict_parsing=False)
    elif not isinstance(self.flow, agent.Flow):
        raise plugin.PluginError("Flow must be provided as JSON string.")

    for status in self._run_flow():
        # Report progress to the server before yielding to the caller.
        self.flow.ticket.send_status(status)
        yield status
def calculate(self):
    """Locate and deserialize the agent configuration.

    Configuration sources are tried in order:
      1. The "agent_config_data" session parameter (raw config data).
      2. The REKALL_AGENT_CONFIG environment variable (raw config data).
      3. A config file path from the "agent_configuration" session
         parameter or the REKALL_AGENT_CONFIG_FILE environment variable.

    Returns:
      The deserialized configuration object, or obj.NoneObject when no
      configuration data could be located.
    """
    config_data = self.session.GetParameter("agent_config_data")
    if not config_data:
        config_data = os.environ.get("REKALL_AGENT_CONFIG")

    if not config_data:
        # The configuration file can be given in the session, or specified
        # on the command line. This is the path to the agent config file.
        agent_config = self.session.GetParameter("agent_configuration")
        if not agent_config:
            agent_config = os.environ.get("REKALL_AGENT_CONFIG_FILE")

        if agent_config:
            # Set the search path to the location of the configuration
            # file. This allows @file directives to access files relative to
            # the main config file.
            # NOTE(review): "== None" looks deliberate - GetParameter may
            # return a NoneObject which compares equal to None but is not
            # None; confirm before rewriting as "is None".
            if self.session.GetParameter("config_search_path") == None:
                self.session.SetParameter(
                    "config_search_path", [os.path.dirname(agent_config)])

            # Read as bytes; yaml.safe_load below accepts raw data.
            with open(agent_config, "rb") as fd:
                config_data = fd.read()

    if not config_data:
        return obj.NoneObject("No valid configuration provided in session.")

    # We deliberately do not raise errors for unknown fields in
    # case the configuration was created in older agent version -
    # We just ignore unknown fields.
    return serializer.unserialize(
        session=self.session,
        data=yaml.safe_load(config_data),
        strict_parsing=False)
def calculate(self):
    """Locate and deserialize the agent configuration.

    Configuration sources are tried in order:
      1. The "agent_config_data" session parameter (raw config data).
      2. The REKALL_AGENT_CONFIG environment variable (raw config data).
      3. A config file path from the "agent_configuration" session
         parameter or the REKALL_AGENT_CONFIG_FILE environment variable.

    Returns:
      The deserialized configuration object, or obj.NoneObject when no
      configuration data could be located.
    """
    config_data = self.session.GetParameter("agent_config_data")
    if not config_data:
        config_data = os.environ.get("REKALL_AGENT_CONFIG")

    if not config_data:
        # The configuration file can be given in the session, or specified
        # on the command line. This is the path to the agent config file.
        agent_config = self.session.GetParameter("agent_configuration")
        if not agent_config:
            agent_config = os.environ.get("REKALL_AGENT_CONFIG_FILE")

        if agent_config:
            # Set the search path to the location of the configuration
            # file. This allows @file directives to access files relative to
            # the main config file.
            # NOTE(review): "== None" looks deliberate - GetParameter may
            # return a NoneObject which compares equal to None but is not
            # None; confirm before rewriting as "is None".
            if self.session.GetParameter("config_search_path") == None:
                self.session.SetParameter("config_search_path",
                                          [os.path.dirname(agent_config)])

            # Read as bytes; yaml.safe_load below accepts raw data.
            with open(agent_config, "rb") as fd:
                config_data = fd.read()

    if not config_data:
        return obj.NoneObject(
            "No valid configuration provided in session.")

    # We deliberately do not raise errors for unknown fields in
    # case the configuration was created in older agent version -
    # We just ignore unknown fields.
    return serializer.unserialize(session=self.session,
                                  data=yaml.safe_load(config_data),
                                  strict_parsing=False)
def collect(self):
    """Deserialize the flow argument if needed, then run it.

    The flow may arrive as a JSON string, as a primitive dict, or as an
    agent.Flow object; anything else is rejected. Every status emitted
    by the running flow is sent to the server and yielded as a row.
    """
    self.flow = self.plugin_args.flow

    if isinstance(self.flow, basestring):
        # JSON encoded flow, e.g. passed on the command line.
        decoded = json.loads(self.flow)
        # Allow for future addition of fields.
        self.flow = serializer.unserialize(
            decoded, session=self.session, strict_parsing=False)
    elif isinstance(self.flow, dict):
        self.flow = serializer.unserialize(
            self.flow, session=self.session, strict_parsing=False)
    elif not isinstance(self.flow, agent.Flow):
        raise plugin.PluginError("Flow must be provided as JSON string.")

    for flow_status in self._run_flow():
        # Keep the server informed, then emit a row for the caller.
        self.flow.ticket.send_status(flow_status)
        yield {"status": flow_status.copy()}
def _run_flow(self):
    """Execute every action of self.flow, yielding status updates.

    Generator. For each action a "Started" status is yielded before the
    action runs; after all actions a final "Done" status is yielded. If
    any action raises, an "Error" status (with backtrace) is yielded,
    sent to the server, and the flow is aborted. Statuses are also
    pushed to the server through the flow's ticket as work progresses.
    """
    # Flow has a condition - we only run the flow if the condition matches.
    if self.flow.condition:
        try:
            if not list(self.session.plugins.search(self.flow.condition)):
                self.session.logging.debug(
                    "Ignoring flow %s because condition %s is not true.",
                    self.flow.flow_id, self.flow.condition)
                return

        # If the query failed to run we must ignore this flow.
        except Exception as e:
            self.session.logging.exception(e)
            return

    # Prepare the session specified by this flow.
    status = self.flow.status
    for action in self.flow.actions:
        try:
            # Make a progress ticket for this action if required.
            status.status = "Started"
            status.client_id = self._config.client.client_id
            status.current_action = action
            yield status

            # Run the action with the new session, and report the produced
            # collections. Note that the ticket contains all collections for
            # all actions cumulatively.
            rekall_session = self._get_session(action.rekall_session)
            # Re-create the action inside the flow-specific session so it
            # runs with that session's configuration.
            action_to_run = serializer.unserialize(
                action.to_primitive(), session=rekall_session,
                strict_parsing=False)
            for collection in (action_to_run.run(flow_obj=self.flow) or []):
                status.collections.append(collection)

            # Update the server on our progress
            self.flow.ticket.send_status(status)
        except Exception as e:
            # Any failure aborts the whole flow: report the error (with
            # backtrace) both to our caller and to the server.
            status.status = "Error"
            status.error = utils.SmartUnicode(e)
            status.backtrace = traceback.format_exc()
            yield status
            self.flow.ticket.send_status(status)
            self.session.logging.exception(e)
            return

    status.status = "Done"
    status.current_action = None
    yield status
def log(current, type, **kwargs):
    """Insert an audit record for the current request into the audit table.

    Args:
      current: The web2py request context object.
      type: Event type tag; stored in the row's type column and embedded
        into the message payload as "__type__".
      **kwargs: Additional event details folded into the stored message.
        The current username is always added under "user", and the token
        id under "token_id" when the request was made with a token.
    """
    kwargs["__type__"] = type
    user = kwargs["user"] = utils.get_current_username(current)

    # Access was made via a token.
    if current.request.token:
        kwargs["token_id"] = current.request.token["token_id"]

    # NOTE(review): calling serializer.unserialize() on an outgoing payload
    # looks inverted - presumably it converts the raw dict into a message
    # object the db layer can store; confirm against the serializer API.
    # Timestamp is naive local time (datetime.now()); verify whether the
    # audit table expects UTC.
    current.db.audit.insert(timestamp=datetime.datetime.now(),
                            message=serializer.unserialize(kwargs),
                            user=user,
                            type=type)
def _run_flow(self):
    """Execute every action of self.flow, yielding status updates.

    Generator. For each action a "Started" status is yielded before the
    action runs; after all actions a final "Done" status is yielded. If
    any action raises, an "Error" status (with backtrace) is yielded,
    sent to the server, and the flow is aborted. Statuses are also
    pushed to the server through the flow's ticket as work progresses.
    """
    # Flow has a condition - we only run the flow if the condition matches.
    if self.flow.condition:
        try:
            if not list(self.session.plugins.search(self.flow.condition)):
                self.session.logging.debug(
                    "Ignoring flow %s because condition %s is not true.",
                    self.flow.flow_id, self.flow.condition)
                return

        # If the query failed to run we must ignore this flow.
        except Exception as e:
            self.session.logging.exception(e)
            return

    # Prepare the session specified by this flow.
    status = self.flow.status
    for action in self.flow.actions:
        try:
            # Make a progress ticket for this action if required.
            status.status = "Started"
            status.client_id = self._config.client.client_id
            status.current_action = action
            yield status

            # Run the action with the new session, and report the produced
            # collections. Note that the ticket contains all collections for
            # all actions cumulatively.
            rekall_session = self._get_session(action.rekall_session)
            # Re-create the action inside the flow-specific session so it
            # runs with that session's configuration.
            action_to_run = serializer.unserialize(
                action.to_primitive(), session=rekall_session,
                strict_parsing=False)
            for collection in (action_to_run.run(flow_obj=self.flow) or []):
                status.collections.append(collection)

            # Update the server on our progress
            self.flow.ticket.send_status(status)
        except Exception as e:
            # Any failure aborts the whole flow: report the error (with
            # backtrace) both to our caller and to the server.
            status.status = "Error"
            status.error = utils.SmartUnicode(e)
            status.backtrace = traceback.format_exc()
            yield status
            self.flow.ticket.send_status(status)
            self.session.logging.exception(e)
            return

    status.status = "Done"
    status.current_action = None
    yield status
def decode(x):
    """Rebuild an object from its stored JSON representation.

    An empty stored value decodes to a fresh default instance of cls;
    a value that fails to deserialize is logged and likewise replaced
    by a new object.
    """
    data = json.loads(x) if x else dict(__type__=cls.__name__)

    try:
        return serializer.unserialize(data, strict_parsing=False)
    except ValueError as e:
        logging.debug("%s: Unable to decode %s", e, data)

    # If the data in the table is seriously messed up we just replace it
    # with a new object.
    return cls()
def GetConfig(current):
    """Return the cached server configuration, loading it on first use.

    Args:
      current: The web2py request context (used to locate the app folder).

    Returns:
      The ServerConfig singleton. If the YAML file is missing or invalid,
      an empty ServerConfig is cached and returned instead.
    """
    yaml_path = os.path.join(
        current.request.folder, "private", "server_config.yaml")

    global _CONFIG
    if _CONFIG is None:
        try:
            # Use a context manager so the file handle is always closed
            # (the previous open().read() leaked it), and safe_load for
            # consistency with the rest of the codebase - plain yaml.load
            # can construct arbitrary Python objects.
            with open(yaml_path) as fd:
                _CONFIG = serializer.unserialize(
                    yaml.safe_load(fd.read()),
                    strict_parsing=True,
                    type=ServerConfig)
        except (IOError, ValueError):
            # File does not exist, we just make an empty one.
            _CONFIG = ServerConfig()

    return _CONFIG
def wrapper(current):
    """Verify the request's HTTP signature before allowing it through.

    Checks that the X-Rekall-Signature header carries a valid signature
    over the assertion plus the raw request body, and that the asserted
    URL path matches the path actually being served.

    Returns:
      True when the signature and the URL assertion check out.

    Raises:
      PermissionDenied: When the header is missing or any check fails.
    """
    header = current.request.env['HTTP_X_REKALL_SIGNATURE']
    if not header:
        raise PermissionDenied()

    header = serializer.unserialize(header, strict_parsing=False,
                                    type=crypto.HTTPSignature)
    if header:
        if header.public_key:
            current.client_id = header.public_key.client_id()

        # The signature covers the assertion concatenated with the raw
        # request body. (Removed a dead 'data = ""' assignment that was
        # immediately overwritten.)
        data = current.request.body.getvalue()
        if header.public_key.verify(header.assertion + data,
                                    header.signature):
            assertion = serializer.unserialize(header.assertion,
                                               strict_parsing=False)
            if assertion:
                # Only accept the signature if it asserts the URL we are
                # actually serving - rejects replays against other paths.
                asserted_url = urlparse.urlparse(assertion.url)
                our_url = urlparse.urlparse(
                    current.request.env.web2py_original_uri or "")

                if asserted_url.path == our_url.path:
                    return True

    raise PermissionDenied()
def testAgent(self):
    """End to end agent test: run a search flow and verify its output."""
    collection_id = str(uuid.uuid4())
    ticket_location = dict(
        __type__="FileLocation",
        path_prefix=self.temp_directory,
        path_template="ticket.json",
    )
    search_action = dict(
        __type__="PluginAction",
        plugin="Search",
        args=dict(query="select proc from pslist() where proc.pid < 10"),
        collection=dict(
            __type__="JSONCollection",
            id=collection_id,
            location=dict(__type__="FileLocation",
                          path_prefix=self.temp_directory,
                          path_template="collection.json"),
        ))
    flow_data = dict(
        __type__="Flow",
        rekall_session=dict(live="API", logging_level="debug"),
        ticket=dict(location=ticket_location),
        actions=[search_action])

    # Validate the data
    flow_obj = serializer.unserialize(
        flow_data, session=self.session, strict_parsing=True)

    statuses = [row["status"]
                for row in self.session.plugins.run_flow(flow_obj)]

    # The flow reports a start and a completion status, in that order.
    self.assertEqual(statuses[0].status, "Started")
    self.assertEqual(statuses[1].status, "Done")
    self.assertGreater(statuses[1].timestamp, statuses[0].timestamp)

    # Exactly one collection was produced - the one the action declared.
    self.assertEqual(len(statuses[1].collection_ids), 1)
    self.assertIn(statuses[0].current_action.collection.id,
                  statuses[1].collection_ids)

    with open(os.path.join(self.temp_directory, "collection.json")) as fd:
        collection_data = json.load(fd)

    # Should be two tables, one for data and one for logs
    self.assertEqual(len(collection_data["tables"]), 2)
    self.assertEqual(collection_data["tables"][0]["name"], "logs")
    self.assertEqual(collection_data["tables"][1]["name"], "data")
    self.assertEqual(collection_data["part_number"], 0)
def _read_all_flows(self):
    """Fetch all job queue files in parallel and collect their flows.

    Queue files that are empty are skipped. Files that fail to parse are
    logged and skipped, unless the "debug" session parameter is set, in
    which case the error propagates.
    """
    flows = []
    job_data_iter = common.THREADPOOL.imap_unordered(
        _LocationTracker.get_data, self.jobs_locations)

    for raw_data in job_data_iter:
        if not raw_data:
            continue

        try:
            job_file = serializer.unserialize(
                json.loads(raw_data), session=self.session,
                strict_parsing=False)
            flows.extend(job_file.flows)
        except Exception as e:
            if self.session.GetParameter("debug"):
                raise

            self.session.logging.error("Error %r: %s", e, e)

    return flows
def testAgent(self):
    """Run a complete search flow through the agent and check results."""
    # Build the flow description from the inside out.
    collection_spec = dict(
        __type__="JSONCollection",
        id=str(uuid.uuid4()),
        location=dict(
            __type__="FileLocation",
            path_prefix=self.temp_directory,
            path_template="collection.json"),
    )
    flow_data = dict(
        __type__="Flow",
        rekall_session=dict(live="API", logging_level="debug"),
        ticket=dict(
            location=dict(
                __type__="FileLocation",
                path_prefix=self.temp_directory,
                path_template="ticket.json",
            )),
        actions=[
            dict(__type__="PluginAction",
                 plugin="Search",
                 args=dict(
                     query="select proc from pslist() where proc.pid < 10"),
                 collection=collection_spec)
        ])

    # Validate the data
    flow_obj = serializer.unserialize(
        flow_data, session=self.session, strict_parsing=True)

    statuses = []
    for row in self.session.plugins.run_flow(flow_obj):
        statuses.append(row["status"])

    # First status marks the start, second marks completion.
    self.assertEqual(statuses[0].status, "Started")
    self.assertEqual(statuses[1].status, "Done")
    self.assertGreater(statuses[1].timestamp, statuses[0].timestamp)

    # The single declared collection must be reported back.
    self.assertEqual(len(statuses[1].collection_ids), 1)
    self.assertIn(statuses[0].current_action.collection.id,
                  statuses[1].collection_ids)

    collection_path = os.path.join(self.temp_directory, "collection.json")
    with open(collection_path) as fd:
        collection_data = json.load(fd)

    # Should be two tables, one for data and one for logs
    self.assertEqual(len(collection_data["tables"]), 2)
    self.assertEqual(collection_data["tables"][0]["name"], "logs")
    self.assertEqual(collection_data["tables"][1]["name"], "data")
    self.assertEqual(collection_data["part_number"], 0)
def _read_all_flows(self):
    """Poll each job queue for flows modified since our last check.

    Only files changed after the writeback's last flow timestamp are
    fetched. Unparseable job files are logged and skipped, unless the
    "debug" session parameter is set, in which case the error propagates.
    """
    collected = []
    last_seen = self.writeback.last_flow_time.timestamp

    for queue_location in self._config.client.get_jobs_queues():
        data = queue_location.read_file(if_modified_since=last_seen)
        if not data:
            continue

        try:
            job_file = serializer.unserialize(json.loads(data),
                                              session=self.session,
                                              strict_parsing=False)
            collected.extend(job_file.flows)
        except Exception as e:
            if self.session.GetParameter("debug"):
                raise

            self.session.logging.error("Error %r: %s", e, e)

    return collected
def _read_all_flows(self):
    """Collect pending flows from every job queue the client watches.

    Each queue file is fetched only if it changed since the last flow
    we processed. Parse failures are logged and the file is skipped;
    with the "debug" session parameter set they are re-raised instead.
    """
    all_flows = []
    for location in self._config.client.get_jobs_queues():
        payload = location.read_file(
            if_modified_since=self.writeback.last_flow_time.timestamp)

        try:
            if payload:
                decoded = json.loads(payload)
                job_file = serializer.unserialize(
                    decoded, session=self.session, strict_parsing=False)
                all_flows.extend(job_file.flows)
        except Exception as e:
            if self.session.GetParameter("debug"):
                raise

            self.session.logging.error("Error %r: %s", e, e)

    return all_flows