def testBadPickle(self):
  """Verify graceful recovery when a pickled state member can't be loaded."""
  state = rdfvalue.FlowState()
  # Keep an RDFURN instance in the state before serializing it.
  state.Register("urn", rdfvalue.RDFURN("aff4:/"))
  state_bytes = state.SerializeToString()

  # Substitute the class with something entirely different so the stored
  # RDFURN instance can no longer be unpickled.
  with test_lib.Stubber(rdfvalue, "RDFURN", None):
    # Restoring the state normally should not work since the RDFURN
    # instance can not be found.
    restored = rdfvalue.FlowState(state_bytes)

    # The pickle error must be recorded on the restored state.
    self.assertTrue(isinstance(restored.errors, TypeError))

    # The unreadable field is replaced with an UnknownObject placeholder.
    self.assertTrue(isinstance(restored.urn, flows.UnknownObject))

    # A missing attribute is a different kind of error, but it is trapped
    # the same way.
    del rdfvalue.RDFURN

    restored = rdfvalue.FlowState(state_bytes)
    self.assertTrue(isinstance(restored.errors, AttributeError))
    self.assertTrue(isinstance(restored.urn, flows.UnknownObject))
def Start(self):
  """Initializes this hunt from arguments.

  Registers the results metadata and results collection URNs on the hunt
  context, instantiates the configured output plugins and persists their
  states, then creates the results collection itself.
  """
  self.state.context.Register("results_metadata_urn",
                              self.urn.Add("ResultsMetadata"))
  self.state.context.Register("results_collection_urn",
                              self.urn.Add("Results"))

  with aff4.FACTORY.Create(self.state.context.results_metadata_urn,
                           "HuntResultsMetadata", mode="rw",
                           token=self.token) as results_metadata:
    state = rdfvalue.FlowState()

    # Older hunt argument types may not have an output_plugins field at
    # all; treat a missing attribute the same as an empty plugin list
    # (matches the EAFP guard used by the other Start() variants).
    try:
      plugins = self.state.args.output_plugins or []
    except AttributeError:
      plugins = []

    for index, plugin in enumerate(plugins):
      plugin_obj = plugin.GetPluginForHunt(self)

      # Key by plugin name and index so multiple instances of the same
      # plugin type don't collide.
      state.Register("%s_%d" % (plugin.plugin_name, index),
                     (plugin, plugin_obj.state))

    results_metadata.Set(results_metadata.Schema.OUTPUT_PLUGINS(state))

  with aff4.FACTORY.Create(self.state.context.results_collection_urn,
                           "RDFValueCollection", mode="rw",
                           token=self.token) as results_collection:
    results_collection.SetChunksize(1024 * 1024)
    self.state.context.Register("results_collection", results_collection)

  self.SetDescription()
def ReadCronState(self):
  """Read this cron job's persisted state from AFF4.

  Returns:
    The stored FlowState, or a fresh empty FlowState when none is set.

  Raises:
    StateReadError: when the cron job object can't be opened.
  """
  try:
    cron_obj = aff4.FACTORY.Open(self.cron_job_urn,
                                 aff4_type="CronJob",
                                 token=self.token)
    return cron_obj.Get(cron_obj.Schema.STATE,
                        default=rdfvalue.FlowState())
  except aff4.InstantiationError as error:
    raise StateReadError(error)
def testFlowState(self):
  """Registered values survive a serialize/deserialize round trip."""
  original = rdfvalue.FlowState()
  original.Register("teststate", 1)
  original.teststate = 100

  original.Register("context", flows.DataObject())
  original.context.testcontext = 50

  serialized = original.SerializeToString()

  restored = rdfvalue.FlowState()
  restored.ParseFromString(serialized)

  self.assertEqual(restored.teststate, 100)
  self.assertEqual(restored.context.testcontext, 50)

  # Exactly two names were registered: context and teststate.
  self.assertEqual(len(restored), 2)
  self.assertEqual(len(restored.context), 1)
def Start(self):
  """Initializes this hunt from arguments.

  Registers the results/metadata/plugin-output URNs on the hunt context,
  instantiates the configured output plugins and persists their states,
  then creates the results collection and all auxiliary hunt collections.
  """
  self.state.context.Register("results_metadata_urn",
                              self.urn.Add("ResultsMetadata"))
  self.state.context.Register("results_collection_urn",
                              self.urn.Add("Results"))
  # NOTE: plugin output deliberately shares the "Results" sub-URN with the
  # results collection; each plugin gets its own child below.
  self.state.context.Register("output_plugins_base_urn",
                              self.urn.Add("Results"))

  with aff4.FACTORY.Create(
      self.state.context.results_metadata_urn, "HuntResultsMetadata",
      mode="rw", token=self.token) as results_metadata:
    state = rdfvalue.FlowState()

    # Hunt argument types predating output plugin support have no
    # output_plugins attribute; treat that as an empty plugin list.
    try:
      plugins_descriptors = self.state.args.output_plugins
    except AttributeError:
      plugins_descriptors = []

    for index, plugin_descriptor in enumerate(plugins_descriptors):
      output_base_urn = self.state.context.output_plugins_base_urn.Add(
          plugin_descriptor.plugin_name)

      plugin_class = plugin_descriptor.GetPluginClass()
      plugin_obj = plugin_class(self.state.context.results_collection_urn,
                                output_base_urn=output_base_urn,
                                args=plugin_descriptor.plugin_args,
                                token=self.token)

      # Key by plugin name and index so multiple instances of the same
      # plugin type don't collide.
      state.Register("%s_%d" % (plugin_descriptor.plugin_name, index),
                     (plugin_descriptor, plugin_obj.state))

    results_metadata.Set(results_metadata.Schema.OUTPUT_PLUGINS(state))

  # Create results collection.
  with aff4.FACTORY.Create(
      self.state.context.results_collection_urn, "ResultsOutputCollection",
      mode="w", token=self.token) as results_collection:
    results_collection.Set(results_collection.Schema.RESULTS_SOURCE,
                           self.urn)

  # Create all other hunt-related collections.
  for urn in [self.logs_collection_urn,
              self.all_clients_collection_urn,
              self.completed_clients_collection_urn,
              self.clients_errors_collection_urn,
              self.output_plugins_status_collection_urn,
              self.output_plugins_errors_collection_urn]:
    with aff4.FACTORY.Create(urn, "PackedVersionedCollection", mode="w",
                             token=self.token):
      pass

  # Derive a description from the hunt's flow if the user gave none.
  if not self.state.context.args.description:
    self.SetDescription()
def Start(self):
  """Initializes this hunt from arguments.

  Registers the results metadata and results collection URNs on the hunt
  context, instantiates the configured output plugins and persists their
  states, then creates the results collection and all auxiliary hunt
  collections.
  """
  self.state.context.Register("results_metadata_urn",
                              self.urn.Add("ResultsMetadata"))
  self.state.context.Register("results_collection_urn",
                              self.urn.Add("Results"))

  # TODO(user): Remove as soon as old style hunts (ones that use
  # RDFValueCollection) are removed.
  self.state.context.Register("results_collection", None)

  with aff4.FACTORY.Create(self.state.context.results_metadata_urn,
                           "HuntResultsMetadata", mode="rw",
                           token=self.token) as results_metadata:
    state = rdfvalue.FlowState()

    # Hunt argument types predating output plugin support have no
    # output_plugins attribute; treat that as an empty plugin list.
    try:
      plugins = self.state.args.output_plugins
    except AttributeError:
      plugins = []

    for index, plugin in enumerate(plugins):
      plugin_obj = plugin.GetPluginForHunt(self)

      # Key by plugin name and index so multiple instances of the same
      # plugin type don't collide.
      state.Register("%s_%d" % (plugin.plugin_name, index),
                     (plugin, plugin_obj.state))

    results_metadata.Set(results_metadata.Schema.OUTPUT_PLUGINS(state))

  # Create results collection.
  with aff4.FACTORY.Create(self.state.context.results_collection_urn,
                           "ResultsOutputCollection", mode="w",
                           token=self.token) as results_collection:
    results_collection.Set(results_collection.Schema.RESULTS_SOURCE,
                           self.urn)

  # Create all other hunt-related collections.
  for urn in [self.logs_collection_urn,
              self.all_clients_collection_urn,
              self.completed_clients_collection_urn,
              self.clients_errors_collection_urn,
              self.output_plugins_status_collection_urn,
              self.output_plugins_errors_collection_urn]:
    with aff4.FACTORY.Create(urn, "PackedVersionedCollection", mode="w",
                             token=self.token):
      pass

  # Derive a description from the hunt's flow if the user gave none.
  if not self.state.context.args.description:
    self.SetDescription()
def __init__(self, source_urn=None, output_base_urn=None, args=None,
             token=None, state=None):
  """OutputPlugin constructor.

  Note that OutputPlugin constructor may run with security checks enabled
  (if they're enabled in the config). Therefore it's a bad idea to write
  anything to AFF4 in the constructor.

  Constructor should only be overridden if some non-self.state-stored
  class members should be initialized.

  Args:
    source_urn: URN of the data source to process the results from.
    output_base_urn: URN of the AFF4 volume where plugin will write output
                     data (if needed).
    args: This plugin's arguments.
    token: Security token.
    state: Instance of rdfvalue.FlowState. Represents plugin's state. If
           this is passed, no initialization will be performed, only the
           state will be applied.

  Raises:
    ValueError: when state argument is passed together with args or token
                arguments.
  """
  if state and (token or args):
    raise ValueError("'state' argument can't be passed together with 'args' "
                     "or 'token'.")

  if not state:
    # Fresh plugin: build a new state and record the construction
    # parameters in it. (The former "state or rdfvalue.FlowState()" was
    # redundant — state is known to be falsy on this branch.)
    self.state = rdfvalue.FlowState()
    self.state.Register("source_urn", source_urn)
    self.state.Register("output_base_urn", output_base_urn)
    self.state.Register("args", args)
    self.state.Register("token", token)

    self.Initialize()
  else:
    # Rehydrating from a previously stored state: apply it verbatim.
    self.state = state

  self.args = self.state.args
  self.token = self.state.token

  self.lock = threading.RLock()
def __init__(self, collection_urn, args=None, token=None, state=None):
  """HuntOutputPlugin constructor.

  HuntOutputPlugin constructor is called during StartHuntFlow and therefore
  runs with security checks enabled (if they're enabled in the config).
  Therefore it's a bad idea to write anything to AFF4 in the constructor.

  Args:
    collection_urn: URN of the collection which results are going to be
                    processed.
    args: This plugin's arguments.
    token: Security token.
    state: Instance of rdfvalue.FlowState. Represents plugin's state. If
           this is passed, no initialization will be performed, only the
           state will be applied.

  Raises:
    ValueError: when state argument is passed together with args or token
                arguments.
  """
  if state and (token or args):
    raise ValueError(
        "'state' argument can't be passed together with 'args' "
        "or 'token'.")

  if not state:
    # Fresh plugin: build a new state and record the construction
    # parameters in it. (The former "state or rdfvalue.FlowState()" was
    # redundant — state is known to be falsy on this branch.)
    self.state = rdfvalue.FlowState()
    self.state.Register("collection_urn", collection_urn)
    self.state.Register("args", args)
    self.state.Register("token", token)

    self.Initialize()
  else:
    # Rehydrating from a previously stored state: apply it verbatim.
    self.state = state

  self.args = self.state.args
  self.token = self.state.token

  self.lock = threading.RLock()
def Start(self):
  """Initializes this hunt from arguments.

  Registers the results metadata and results collection URNs on the hunt
  context, instantiates the configured output plugins and persists their
  states, then creates and registers the results collection.
  """
  self.state.context.Register("results_metadata_urn",
                              self.urn.Add("ResultsMetadata"))
  self.state.context.Register("results_collection_urn",
                              self.urn.Add("Results"))

  with aff4.FACTORY.Create(self.state.context.results_metadata_urn,
                           "HuntResultsMetadata", mode="rw",
                           token=self.token) as results_metadata:
    state = rdfvalue.FlowState()

    # Hunt argument types predating output plugin support have no
    # output_plugins attribute; treat that as an empty plugin list.
    try:
      plugins = self.state.args.output_plugins
    except AttributeError:
      plugins = []

    for index, plugin in enumerate(plugins):
      plugin_obj = plugin.GetPluginForHunt(self)

      # Key by plugin name and index so multiple instances of the same
      # plugin type don't collide.
      state.Register("%s_%d" % (plugin.plugin_name, index),
                     (plugin, plugin_obj.state))

    results_metadata.Set(results_metadata.Schema.OUTPUT_PLUGINS(state))

  with aff4.FACTORY.Create(self.state.context.results_collection_urn,
                           "ResultsOutputCollection", mode="rw",
                           token=self.token) as results_collection:
    results_collection.Set(results_collection.Schema.RESULTS_SOURCE,
                           self.urn)
    self.state.context.Register("results_collection", results_collection)

  # Derive a description from the hunt's flow if the user gave none.
  if not self.state.context.args.description:
    self.SetDescription()
def GenerateSample(self, number=0):
  """Build a FlowState sample with a single registered "number" value."""
  sample = rdfvalue.FlowState()
  sample.Register("number", number)
  return sample