def testNestedDictsOpaqueTypes(self):

  class UnSerializable(object):
    pass

  test_dict = dict(
      key1={"A": 1},
      key2=rdf_protodict.Dict({"A": 1}),
      key3=[1, UnSerializable(), 3, [1, 2, [3]]],
      key4=[[], None, ["abc"]],
      key5=UnSerializable(),
      key6=["a", UnSerializable(), "b"])

  self.assertRaises(TypeError, rdf_protodict.Dict, **test_dict)

  sample = rdf_protodict.Dict()
  for key, value in test_dict.iteritems():
    sample.SetItem(key, value, raise_on_error=False)

  # Need to do some manual checking here since this is a lossy conversion.
  self.assertEqual(test_dict["key1"], sample["key1"])
  self.assertEqual(test_dict["key2"], sample["key2"])

  self.assertEqual(1, sample["key3"][0])
  self.assertTrue("Unsupported type" in sample["key3"][1])
  self.assertItemsEqual(test_dict["key3"][2:], sample["key3"][2:])

  self.assertEqual(test_dict["key4"], sample["key4"])

  self.assertTrue("Unsupported type" in sample["key5"])

  self.assertEqual("a", sample["key6"][0])
  self.assertTrue("Unsupported type" in sample["key6"][1])
  self.assertEqual("b", sample["key6"][2])
def testSerialization(self):
  test_dict = dict(
      key1=1,  # Integer.
      key2="foo",  # String.
      key3=u"\u4f60\u597d",  # Unicode.
      key5=rdfvalue.RDFDatetime.FromHumanReadable("2012/12/11"),  # RDFValue.
      key6=None,  # Support None Encoding.
      key7=rdf_structs.EnumNamedValue(5, name="Test"),  # Enums.
  )

  # Initialize through keywords.
  sample = rdf_protodict.Dict(**test_dict)
  self.CheckTestDict(test_dict, sample)

  # Initialize through dict.
  sample = rdf_protodict.Dict(test_dict)
  self.CheckTestDict(test_dict, sample)

  # Initialize through a serialized form.
  serialized = sample.SerializeToString()
  self.assertIsInstance(serialized, str)

  sample = rdf_protodict.Dict.FromSerializedString(serialized)
  self.CheckTestDict(test_dict, sample)

  # Convert to a dict.
  self.CheckTestDict(test_dict, sample.ToDict())
def testNestedDicts(self):
  test_dict = dict(
      key1={"A": 1},
      key2=rdf_protodict.Dict({"A": 1}),
  )

  sample = rdf_protodict.Dict(**test_dict)
  self.CheckTestDict(test_dict, sample)
  self.CheckTestDict(test_dict, sample.ToDict())
def testNestedDictsMultipleTypes(self):
  test_dict = dict(
      key1={"A": 1},
      key2=rdf_protodict.Dict({"A": 1}),
      key3=[1, 2, 3, [1, 2, [3]]],
      key4=[[], None, ["abc"]],
      key5=set([1, 2, 3]))

  sample = rdf_protodict.Dict(**test_dict)
  self.CheckTestDict(test_dict, sample)

  to_dict = sample.ToDict()
  self.CheckTestDict(test_dict, to_dict)
  self.assertIsInstance(to_dict["key1"], dict)
def testOverwriting(self):
  req = rdf_client.Iterator(client_state=rdf_protodict.Dict({"A": 1}))
  # There should be one element now.
  self.assertEqual(len(list(req.client_state.items())), 1)

  req.client_state = rdf_protodict.Dict({"B": 2})
  # Still one element.
  self.assertEqual(len(list(req.client_state.items())), 1)

  req.client_state = rdf_protodict.Dict({})
  # And now it's gone.
  self.assertEqual(len(list(req.client_state.items())), 0)
def testWMIEventConsumerParserRaisesWhenNonEmptyDictReturnedEmpty(self):
  parser = wmi_parser.WMIActiveScriptEventConsumerParser()
  rdf_dict = rdf_protodict.Dict()
  rdf_dict["NonexistentField"] = "Abcdef"

  with self.assertRaises(ValueError):
    for output in parser.Parse(None, rdf_dict, None):
      self.assertEqual(output.__class__, rdf_anomaly.Anomaly)
def testWMIActiveScriptEventConsumerParser(self):
  parser = wmi_parser.WMIActiveScriptEventConsumerParser()
  rdf_dict = rdf_protodict.Dict()
  rdf_dict["CreatorSID"] = [
      1, 5, 0, 0, 0, 0, 0, 5, 21, 0, 0, 0, 152, 18, 57, 8, 206, 29, 80, 44,
      70, 38, 82, 8, 244, 1, 0, 0
  ]
  rdf_dict["KillTimeout"] = 0
  rdf_dict["MachineName"] = None
  rdf_dict["MaximumQueueSize"] = None
  rdf_dict["Name"] = "SomeName"
  rdf_dict["ScriptFilename"] = None
  rdf_dict["ScriptingEngine"] = "VBScript"
  rdf_dict["ScriptText"] = r"""Dim objFS, objFile
Set objFS = CreateObject("Scripting.FileSystemObject")
Set objFile = objFS.OpenTextFile("C:\temp.log", 8, true)
objFile.WriteLine "Time: " & Now & "; Entry made by: ASEC"
objFile.WriteLine "Application closed. UserModeTime: " & TargetEvent.TargetInstance.UserModeTime &_
  "; KernelModeTime: " & TargetEvent.TargetInstance.KernelModeTime & " [hundreds of nanoseconds]"
objFile.Close"""

  result_list = list(parser.Parse(None, rdf_dict, None))
  self.assertEqual(len(result_list), 1)
  result = result_list[0]
  self.assertEqual(result.CreatorSID,
                   "S-1-5-21-137958040-743448014-139601478-500")
  self.assertEqual(result.MaximumQueueSize, 0)
  self.assertFalse(result.ScriptFilename)
def __init__(self, initializer=None, age=None, **kwargs):
  if isinstance(initializer, dict):
    conf = initializer
    initializer = None
  else:
    conf = kwargs
  super(Method, self).__init__(initializer=initializer, age=age)
  probe = conf.get("probe", {})
  resource = conf.get("resource", {})
  hint = conf.get("hint", {})
  target = conf.get("target", {})
  if hint:
    # Add the hint to children.
    for cfg in probe:
      cfg["hint"] = hints.Overlay(child=cfg.get("hint", {}), parent=hint)
  self.probe = [Probe(**cfg) for cfg in probe]
  self.hint = Hint(hint, reformat=False)
  self.match = MatchStrToList(kwargs.get("match"))
  self.matcher = Matcher(self.match, self.hint)
  self.resource = [rdf_protodict.Dict(**r) for r in resource]
  self.target = triggers.Target(**target)
  self.triggers = triggers.Triggers()
  for p in self.probe:
    # If the probe has a target, use it. Otherwise, use the method's target.
    target = p.target or self.target
    self.triggers.Add(p.artifact, target, p)
def Parse(self, stat, knowledge_base):
  _ = stat, knowledge_base
  test_dict = {
      "environ_temp": rdfvalue.RDFString("tempvalue"),
      "environ_path": rdfvalue.RDFString("pathvalue")
  }
  yield rdf_protodict.Dict(test_dict)
def ParseMultiple(self, stats, file_objects, knowledge_base):
  """Parse the found release files."""
  _ = knowledge_base

  # Collate files into path: contents dictionary.
  found_files = self._Combine(stats, file_objects)

  # Determine collected files and apply weighting.
  weights = [w for w in self.WEIGHTS if w.path in found_files]
  weights = sorted(weights, key=lambda x: x.weight)

  for _, path, handler in weights:
    contents = found_files[path]
    obj = handler(contents)
    complete, result = obj.Parse()
    if result is None:
      continue
    elif complete:
      yield rdf_protodict.Dict({
          'os_release': result.release,
          'os_major_version': result.major,
          'os_minor_version': result.minor
      })
      break
  else:
    # No successful parse.
    yield rdf_anomaly.Anomaly(
        type='PARSER_ANOMALY', symptom='Unable to determine distribution.')
def GetConfiguration(self, _):
  self.response_count += 1
  return [
      rdf_protodict.Dict({
          "Client.server_urls": ["http://localhost:8001/"],
          "Client.poll_min": 1.0
      })
  ]
class WMIWin32NetworkAdapterConfigurationMock(object):
  """Mock netadapter."""

  class UnSerializable(object):
    pass

  Caption = "[000005] Intel Gigabit Network Connection"
  DatabasePath = "%SystemRoot%\\System32\\drivers\\etc"
  DefaultIPGateway = ["192.168.1.254", "fe80::211:5eaa:fe00:222"]
  Description = "Intel Gigabit Network Connection"
  DHCPEnabled = True
  DHCPLeaseExpires = "20140825162259.123456-420"
  DHCPLeaseObtained = "20140825122259.123456-420"
  DHCPServer = "192.168.1.1"
  DNSDomain = "internal.example.com"
  DNSDomainSuffixSearchOrder = [
      "blah.example.com", "ad.example.com", "internal.example.com",
      "example.com"
  ]
  DNSEnabledForWINSResolution = False
  DNSHostName = "MYHOST-WIN"
  DNSServerSearchOrder = ["192.168.1.1", "192.168.255.81", "192.168.128.88"]
  DomainDNSRegistrationEnabled = False
  FullDNSRegistrationEnabled = True
  GatewayCostMetric = [0, 256]
  Index = 7
  InterfaceIndex = 11
  IPAddress = [
      "192.168.1.20", "ffff::ffff:aaaa:1111:aaaa",
      "dddd:0:8888:6666:bbbb:aaaa:eeee:bbbb",
      "dddd:0:8888:6666:bbbb:aaaa:ffff:bbbb"
  ]
  IPConnectionMetric = 10
  IPEnabled = True
  IPFilterSecurityEnabled = False
  IPSecPermitIPProtocols = []
  IPSecPermitTCPPorts = []
  IPSecPermitUDPPorts = []
  IPSubnet = ["255.255.254.0", "192", "168", "1"]
  MACAddress = "BB:AA:EE:CC:DD:CC"
  ServiceName = "e1e"
  SettingID = "{AAAAAAAA-EEEE-DDDD-AAAA-CCCCCCCCCCCC}"
  TcpipNetbiosOptions = 0
  WINSEnableLMHostsLookup = True
  WINSScopeID = ""
  NestingTest = {
      "one": {
          "two": [3, 4],
          "broken": UnSerializable(),
          "three": {}
      },
      "four": [],
      "five": "astring",
      "six": [None, None, ""],
      "seven": None,
      "rdfvalue": rdf_protodict.Dict(a="asdf")
  }
  OpaqueObject = UnSerializable()
def testOperatingSystemSelection(self):
  """Tests that we can distinguish based on operating system."""
  self.SetupClient(1, system="Windows XP")
  self.SetupClient(2, system="Linux")
  self.SetupClient(3, system="Windows 7")

  with utils.Stubber(flow.GRRFlow, "StartFlow", self.StartFlow):
    # Now set up the filters.
    now = rdfvalue.RDFDatetime.Now()
    expires = now + rdfvalue.Duration("1h")
    foreman_obj = foreman.GetForeman(token=self.token)

    # Make a new rule.
    rule = foreman_rules.ForemanRule(
        created=now, expires=expires, description="Test rule")

    # Matches Windows boxes.
    rule.client_rule_set = foreman_rules.ForemanClientRuleSet(rules=[
        foreman_rules.ForemanClientRule(
            rule_type=foreman_rules.ForemanClientRule.Type.OS,
            os=foreman_rules.ForemanOsClientRule(os_windows=True))
    ])

    # Will run Test Flow.
    rule.actions.Append(
        flow_name="Test Flow", argv=rdf_protodict.Dict(foo="bar"))

    # Clear the rule set and add the new rule to it.
    rule_set = foreman_obj.Schema.RULES()
    rule_set.Append(rule)

    # Assign it to the foreman.
    foreman_obj.Set(foreman_obj.Schema.RULES, rule_set)
    foreman_obj.Close()

    self.clients_launched = []
    foreman_obj.AssignTasksToClient("C.1000000000000001")
    foreman_obj.AssignTasksToClient("C.1000000000000002")
    foreman_obj.AssignTasksToClient("C.1000000000000003")

    # Make sure that only the Windows machines ran.
    self.assertEqual(len(self.clients_launched), 2)
    self.assertEqual(self.clients_launched[0][0],
                     rdf_client.ClientURN("C.1000000000000001"))
    self.assertEqual(self.clients_launched[1][0],
                     rdf_client.ClientURN("C.1000000000000003"))

    self.clients_launched = []

    # Run again - this should not fire since it already did.
    foreman_obj.AssignTasksToClient("C.1000000000000001")
    foreman_obj.AssignTasksToClient("C.1000000000000002")
    foreman_obj.AssignTasksToClient("C.1000000000000003")
    self.assertEqual(len(self.clients_launched), 0)
def WmiQuery(self, _):
  return [
      rdf_protodict.Dict({
          u"IdentifyingNumber": u"2RXYYZ1",
          u"Name": u"Latitude E7440",
          u"Vendor": u"Dell Inc.",
          u"Version": u"01",
          u"Caption": u"Computer System Product"
      })
  ]
def testWMIEventConsumerParserDoesntFailOnUnknownField(self):
  parser = wmi_parser.WMIActiveScriptEventConsumerParser()
  rdf_dict = rdf_protodict.Dict()
  rdf_dict["NonexistentField"] = "Abcdef"
  rdf_dict["Name"] = "Test event consumer"
  results = list(parser.Parse(None, rdf_dict, None))
  self.assertEqual(2, len(results))
  # Anomalies yield first.
  self.assertEqual(results[0].__class__, rdf_anomaly.Anomaly)
  self.assertEqual(results[1].__class__, rdf_wmi.WMIActiveScriptEventConsumer)
def testDictBehaviour(self):
  tested = rdf_protodict.Dict(a=1)

  now = rdfvalue.RDFDatetime.Now()
  tested["b"] = now

  self.assertEqual(tested["b"], now)
  self.assertEqual(tested["a"], 1)

  tested["b"] = rdfvalue.RDFURN("aff4:/users/")
  self.assertEqual(len(tested), 2)
  self.assertEqual(tested["b"].SerializeToString(), "aff4:/users")
def testGetConfig(self):
  """Check GetConfig client action works."""
  # Use UpdateConfig to generate a config.
  location = ["http://example.com/"]
  request = rdf_protodict.Dict()
  request["Client.server_urls"] = location
  request["Client.foreman_check_frequency"] = 3600

  self.RunAction(admin.UpdateConfiguration, request)

  # Check that our GetConfig actually gets the real data.
  self.RunAction(admin.GetConfiguration)

  self.assertEqual(config.CONFIG["Client.foreman_check_frequency"], 3600)
  self.assertEqual(config.CONFIG["Client.server_urls"], location)
def testArgs(self):
  """Test passing arguments."""
  utils.TEST_VAL = "original"
  python_code = """
magic_return_str = py_args['test']
utils.TEST_VAL = py_args[43]
"""
  signed_blob = rdf_crypto.SignedBlob()
  signed_blob.Sign(python_code, self.signing_key)
  pdict = rdf_protodict.Dict({"test": "dict_arg", 43: "dict_arg2"})
  request = rdf_client.ExecutePythonRequest(
      python_code=signed_blob, py_args=pdict)
  result = self.RunAction(standard.ExecutePython, request)[0]
  self.assertEqual(result.return_val, "dict_arg")
  self.assertEqual(utils.TEST_VAL, "dict_arg2")
def testWMIEventConsumerParserDoesntFailOnMalformedSIDs(self):
  parser = wmi_parser.WMIActiveScriptEventConsumerParser()
  rdf_dict = rdf_protodict.Dict()
  tests = [
      [1, 5, 0, 0, 0, 0, 0, 5, 21, 0, 0],
      "(1, 2, 3)",  # Older clients (3.0.0.3) return the SID like this.
      1,
      {
          1: 2
      },
      (1, 2)
  ]
  for test in tests:
    rdf_dict["CreatorSID"] = test
    result_list = list(parser.Parse(None, rdf_dict, None))
    self.assertEqual(len(result_list), 1)
def testWMICommandLineEventConsumerParser(self):
  parser = wmi_parser.WMICommandLineEventConsumerParser()
  rdf_dict = rdf_protodict.Dict()
  rdf_dict["CommandLineTemplate"] = "cscript KernCap.vbs"
  rdf_dict["CreateNewConsole"] = False
  rdf_dict["CreateNewProcessGroup"] = False
  rdf_dict["CreateSeparateWowVdm"] = False
  rdf_dict["CreateSharedWowVdm"] = False
  rdf_dict["CreatorSID"] = [
      1, 5, 0, 0, 0, 0, 0, 5, 21, 0, 0, 0, 133, 116, 119, 185, 124, 13, 122,
      150, 111, 189, 41, 154, 244, 1, 0, 0
  ]
  rdf_dict["DesktopName"] = None
  rdf_dict["ExecutablePath"] = None
  rdf_dict["FillAttribute"] = None
  rdf_dict["ForceOffFeedback"] = False
  rdf_dict["ForceOnFeedback"] = False
  rdf_dict["KillTimeout"] = 0
  rdf_dict["MachineName"] = None
  rdf_dict["MaximumQueueSize"] = None
  rdf_dict["Name"] = "BVTConsumer"
  rdf_dict["Priority"] = 32
  rdf_dict["RunInteractively"] = False
  rdf_dict["ShowWindowCommand"] = None
  rdf_dict["UseDefaultErrorMode"] = False
  rdf_dict["WindowTitle"] = None
  rdf_dict["WorkingDirectory"] = "C:\\tools\\kernrate"
  rdf_dict["XCoordinate"] = None
  rdf_dict["XNumCharacters"] = None
  rdf_dict["XSize"] = None
  rdf_dict["YCoordinate"] = None
  rdf_dict["YNumCharacters"] = None
  rdf_dict["YSize"] = None

  result_list = list(parser.Parse(None, rdf_dict, None))
  self.assertEqual(len(result_list), 1)
  result = result_list[0]
  self.assertEqual(result.CreatorSID,
                   "S-1-5-21-3111613573-2524581244-2586426735-500")
  self.assertEqual(result.CommandLineTemplate, "cscript KernCap.vbs")
  self.assertEqual(result.Name, "BVTConsumer")
  self.assertEqual(result.KillTimeout, 0)
  self.assertEqual(result.FillAttribute, 0)
  self.assertEqual(result.FillAttributes, 0)
  self.assertFalse(result.ForceOffFeedback)
  self.assertFalse(result.ForceOnFeedback)
def RunWMIQuery(query, baseobj=r"winmgmts:\root\cimv2"):
  """Run a WMI query and return a result.

  Args:
    query: the WMI query to run.
    baseobj: the base object for the WMI query.

  Yields:
    rdf_protodict.Dicts containing key value pairs from the resulting COM
    objects.
  """
  pythoncom.CoInitialize()  # Needs to be called if using COM from a thread.
  wmi_obj = win32com.client.GetObject(baseobj)
  # This allows our WMI to do some extra things, in particular it gives it
  # access to find the executable path for all processes.
  wmi_obj.Security_.Privileges.AddAsString("SeDebugPrivilege")

  # Run the query.
  try:
    query_results = wmi_obj.ExecQuery(query)
  except pythoncom.com_error as e:
    raise RuntimeError("Failed to run WMI query '%s' err was %s" % (query, e))

  # Extract results from the returned COMObject and yield dicts.
  try:
    for result in query_results:
      response = rdf_protodict.Dict()
      properties = (
          list(result.Properties_) +
          list(getattr(result, "SystemProperties_", [])))

      for prop in properties:
        if prop.Name not in IGNORE_PROPS:
          # Protodict can handle most of the types we care about, but we may
          # get some objects that we don't know how to serialize, so we tell
          # the dict to set the value to an error message and keep going.
          response.SetItem(prop.Name, prop.Value, raise_on_error=False)

      yield response

  except pythoncom.com_error as e:
    raise RuntimeError(
        "WMI query data error on query '%s' err was %s" % (query, e))
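# A minimal usage sketch (not from the source): iterating the rdf_protodict.Dict
# objects yielded by RunWMIQuery above. It assumes a Windows host with pywin32
# installed and a working WMI service; the query string, the helper name
# _PrintLogicalDisks and the printed property names are illustrative only.
def _PrintLogicalDisks():
  # Each response behaves like a mapping of WMI property names to values.
  for response in RunWMIQuery("SELECT * FROM Win32_LogicalDisk"):
    print("%s: %s bytes free" % (response["DeviceID"], response["FreeSpace"]))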
def testInterfaceParsing(self):
  parser = wmi_parser.WMIInterfacesParser()
  rdf_dict = rdf_protodict.Dict()
  mock_config = client_test_lib.WMIWin32NetworkAdapterConfigurationMock
  wmi_properties = mock_config.__dict__.iteritems()
  for key, value in wmi_properties:
    if not key.startswith("__"):
      try:
        rdf_dict[key] = value
      except TypeError:
        rdf_dict[key] = "Failed to encode: %s" % value

  result_list = list(parser.Parse(None, rdf_dict, None))
  self.assertEqual(len(result_list), 2)
  for result in result_list:
    if isinstance(result, rdf_client.Interface):
      self.assertEqual(len(result.addresses), 4)
      self.assertItemsEqual(
          [x.human_readable_address for x in result.addresses], [
              "192.168.1.20", "ffff::ffff:aaaa:1111:aaaa",
              "dddd:0:8888:6666:bbbb:aaaa:eeee:bbbb",
              "dddd:0:8888:6666:bbbb:aaaa:ffff:bbbb"
          ])
      self.assertItemsEqual(
          [x.human_readable_address for x in result.dhcp_server_list],
          ["192.168.1.1"])
      self.assertEqual(result.dhcp_lease_expires.AsMicrosecondsSinceEpoch(),
                       1409008979123456)
      self.assertEqual(result.dhcp_lease_obtained.AsMicrosecondsSinceEpoch(),
                       1408994579123456)
    elif isinstance(result, rdf_client.DNSClientConfiguration):
      self.assertItemsEqual(
          result.dns_server,
          ["192.168.1.1", "192.168.255.81", "192.168.128.88"])
      self.assertItemsEqual(result.dns_suffix, [
          "blah.example.com", "ad.example.com", "internal.example.com",
          "example.com"
      ])
def testRdfFormatterFanOut(self):
  rdf = rdf_protodict.Dict()
  user1 = rdf_client.User(username="******")
  user2 = rdf_client.User(username="******")
  rdf["cataclysm"] = "GreyGoo"
  rdf["thinkers"] = [user1, user2]
  rdf["reference"] = {
      "ecophage": ["bots", ["nanobots", ["picobots"]]],
      "doomsday": {
          "books": ["cats cradle", "prey"]
      }
  }
  template = ("{cataclysm}; {thinkers.username}; {reference.ecophage}; "
              "{reference.doomsday}\n")
  hinter = hints.Hinter(template=template)
  expected = ("GreyGoo; drexler,joy; bots,nanobots,picobots; "
              "books:cats cradle,prey")
  result = hinter.Render(rdf)
  self.assertEqual(expected, result)
def WmiQuery(self, query):
  if query.query == u"SELECT * FROM Win32_LogicalDisk":
    self.response_count += 1
    return client_fixture.WMI_SAMPLE
  elif query.query.startswith("Select * "
                              "from Win32_NetworkAdapterConfiguration"):
    self.response_count += 1
    rdf_dict = rdf_protodict.Dict()
    mock = client_test_lib.WMIWin32NetworkAdapterConfigurationMock
    wmi_properties = mock.__dict__.iteritems()
    for key, value in wmi_properties:
      if not key.startswith("__"):
        try:
          rdf_dict[key] = value
        except TypeError:
          rdf_dict[key] = "Failed to encode: %s" % value
    return [rdf_dict]
  else:
    return None
def testUpdateConfiguration(self):
  """Test that we can update the config."""
  # A unique name on the filesystem for the writeback.
  self.config_file = os.path.join(self.temp_dir, "ConfigActionTest.yaml")

  # In a real client, the writeback location should be set to something real,
  # but for this test we make it the same as the config file.
  config.CONFIG.SetWriteBack(self.config_file)

  # Make sure the file is gone.
  self.assertRaises(IOError, open, self.config_file)

  location = ["http://www.example1.com/", "http://www.example2.com/"]
  request = rdf_protodict.Dict()
  request["Client.server_urls"] = location
  request["Client.foreman_check_frequency"] = 3600

  result = self.RunAction(admin.UpdateConfiguration, request)
  self.assertEqual(result, [])
  self.assertEqual(config.CONFIG["Client.foreman_check_frequency"], 3600)

  # Test the config file got written.
  data = open(self.config_file, "rb").read()
  self.assertTrue("server_urls: {0}".format(",".join(location)) in data)

  self.urls = []

  # Now test that our location was actually updated.

  def FakeUrlOpen(url=None, data=None, **_):
    self.urls.append(url)
    response = requests.Response()
    response.status_code = 200
    response._content = data
    return response

  with utils.Stubber(requests, "request", FakeUrlOpen):
    client_context = comms.GRRHTTPClient(worker_cls=MockClientWorker)
    client_context.MakeRequest("")

  # Since the request is successful we only connect to one location.
  self.assertTrue(location[0] in self.urls[0])
def testUpdateConfigBlacklist(self):
  """Tests that disallowed fields are not getting updated."""
  with test_lib.ConfigOverrider({
      "Client.server_urls": ["http://something.com/"],
      "Client.server_serial_number": 1
  }):
    location = ["http://www.example.com"]
    request = rdf_protodict.Dict()
    request["Client.server_urls"] = location
    request["Client.server_serial_number"] = 10

    with self.assertRaises(ValueError):
      self.RunAction(admin.UpdateConfiguration, request)

    # Nothing was updated.
    self.assertEqual(config.CONFIG["Client.server_urls"],
                     ["http://something.com/"])
    self.assertEqual(config.CONFIG["Client.server_serial_number"], 1)
def testUpdateConfig(self):
  """Ensure we can retrieve and update the config."""

  # Write a client without a proper system so we don't need to
  # provide the os specific artifacts in the interrogate flow below.
  client_id = self.SetupClient(0, system="")

  # Only mock the pieces we care about.
  client_mock = action_mocks.ActionMock(admin.GetConfiguration,
                                        admin.UpdateConfiguration)

  loc = "http://www.example.com"
  new_config = rdf_protodict.Dict({
      "Client.server_urls": [loc],
      "Client.foreman_check_frequency": 3600,
      "Client.poll_min": 1
  })

  # Setting config options is disallowed in tests so we need to temporarily
  # revert this.
  with utils.Stubber(config.CONFIG, "Set", config.CONFIG.Set.old_target):
    # Write the config.
    flow_test_lib.TestFlowHelper(
        administrative.UpdateConfiguration.__name__,
        client_mock,
        client_id=client_id,
        token=self.token,
        config=new_config)

  # Now retrieve it again to see if it got written.
  flow_test_lib.TestFlowHelper(
      discovery.Interrogate.__name__,
      client_mock,
      token=self.token,
      client_id=client_id)

  fd = aff4.FACTORY.Open(client_id, token=self.token)
  config_dat = fd.Get(fd.Schema.GRR_CONFIGURATION)
  self.assertEqual(config_dat["Client.server_urls"], [loc])
  self.assertEqual(config_dat["Client.poll_min"], 1)
def __init__(self, request=None, responses=None):
  self.status = None  # A GrrStatus rdfvalue object.
  self.success = True
  self.request = request
  if request:
    self.request_data = rdf_protodict.Dict(request.data)
  self._responses = []
  self._dropped_responses = []

  if responses:
    # This may not be needed if we can assume that responses are
    # returned in lexical order from the data_store.
    responses.sort(key=operator.attrgetter("response_id"))

    # The iterator that was returned as part of these responses. This should
    # be passed back to actions that expect an iterator.
    self.iterator = None

    # Filter the responses by authorized states.
    for msg in responses:
      # Check if the message is authenticated correctly.
      if msg.auth_state != msg.AuthorizationState.AUTHENTICATED:
        logging.warning("%s: Messages must be authenticated (Auth state %s)",
                        msg.session_id, msg.auth_state)
        self._dropped_responses.append(msg)
        # Skip this message - it is invalid.
        continue

      # Check for iterators.
      if msg.type == msg.Type.ITERATOR:
        self.iterator = rdf_client.Iterator(msg.payload)
        continue

      # Look for a status message.
      if msg.type == msg.Type.STATUS:
        # Our status is set to the first status message that we see in
        # the responses. We ignore all other messages after that.
        self.status = rdf_flows.GrrStatus(msg.payload)

        # Check this to see if the call succeeded.
        self.success = self.status.status == self.status.ReturnedStatus.OK

        # Ignore all other messages.
        break

      # Use this message.
      self._responses.append(msg)

    if self.status is None:
      # This is a special case of de-synchronized messages.
      if self._dropped_responses:
        logging.error(
            "De-synchronized messages detected:\n %s", "\n".join(
                [utils.SmartUnicode(x) for x in self._dropped_responses]))

      if responses:
        self._LogFlowState(responses)

      raise FlowError("No valid Status message.")

  # This is the raw message accessible while going through the iterator.
  self.message = None
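# A hedged sketch (not part of the source): how a flow state method might
# consume a Responses object like the one built above. The method name
# ProcessResponses is hypothetical; responses.success and responses.status come
# directly from the __init__ above, while iterating the individual response
# payloads and calling self.SendReply follow common GRR flow conventions and
# are assumptions here.
def ProcessResponses(self, responses):
  if not responses.success:
    # responses.status holds the GrrStatus extracted from the STATUS message.
    raise FlowError("Client action failed: %s" % responses.status)
  for response in responses:
    self.SendReply(response)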
def testWMIEventConsumerParser_EmptyConsumersYieldBlank(self):
  parser = wmi_parser.WMIActiveScriptEventConsumerParser()
  rdf_dict = rdf_protodict.Dict()
  result_list = list(parser.Parse(None, rdf_dict, None))
  self.assertEqual(1, len(result_list))
  self.assertFalse(result_list[0])
def testIntegerComparisons(self):
  """Tests that we can use integer matching rules on the foreman."""

  base_time = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(1336480583.077736)
  boot_time = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(1336300000.000000)

  self.SetupClient(0x11, system="Windows XP", install_time=base_time)
  self.SetupClient(0x12, system="Windows 7", install_time=base_time)
  # This one was installed one week earlier.
  one_week_ago = base_time - rdfvalue.Duration("1w")
  self.SetupClient(0x13, system="Windows 7", install_time=one_week_ago)
  self.SetupClient(0x14, system="Windows 7", last_boot_time=boot_time)

  with utils.Stubber(flow, "StartFlow", self.StartFlow):
    # Now set up the filters.
    now = rdfvalue.RDFDatetime.Now()
    expires = now + rdfvalue.Duration("1h")
    foreman_obj = foreman.GetForeman(token=self.token)

    # Make a new rule.
    rule = foreman_rules.ForemanRule(
        created=now, expires=expires, description="Test rule(old)")

    # Matches the old client.
    one_hour_ago = base_time - rdfvalue.Duration("1h")
    rule.client_rule_set = foreman_rules.ForemanClientRuleSet(rules=[
        foreman_rules.ForemanClientRule(
            rule_type=foreman_rules.ForemanClientRule.Type.INTEGER,
            integer=foreman_rules.ForemanIntegerClientRule(
                field="INSTALL_TIME",
                operator=foreman_rules.ForemanIntegerClientRule.Operator.
                LESS_THAN,
                value=one_hour_ago.AsSecondsSinceEpoch()))
    ])

    old_flow = "Test flow for old clients"

    # Will run Test Flow.
    rule.actions.Append(
        flow_name=old_flow, argv=rdf_protodict.Dict(dict(foo="bar")))

    # Clear the rule set and add the new rule to it.
    rule_set = foreman_obj.Schema.RULES()
    rule_set.Append(rule)

    # Make a new rule.
    rule = foreman_rules.ForemanRule(
        created=now, expires=expires, description="Test rule(new)")

    # Matches the newer clients.
    rule.client_rule_set = foreman_rules.ForemanClientRuleSet(rules=[
        foreman_rules.ForemanClientRule(
            rule_type=foreman_rules.ForemanClientRule.Type.INTEGER,
            integer=foreman_rules.ForemanIntegerClientRule(
                field="INSTALL_TIME",
                operator=foreman_rules.ForemanIntegerClientRule.Operator.
                GREATER_THAN,
                value=one_hour_ago.AsSecondsSinceEpoch()))
    ])

    new_flow = "Test flow for newer clients"

    # Will run Test Flow.
    rule.actions.Append(
        flow_name=new_flow, argv=rdf_protodict.Dict(dict(foo="bar")))

    rule_set.Append(rule)

    # Make a new rule.
    rule = foreman_rules.ForemanRule(
        created=now, expires=expires, description="Test rule(eq)")

    # Note that this also tests the handling of nonexistent attributes.
    rule.client_rule_set = foreman_rules.ForemanClientRuleSet(rules=[
        foreman_rules.ForemanClientRule(
            rule_type=foreman_rules.ForemanClientRule.Type.INTEGER,
            integer=foreman_rules.ForemanIntegerClientRule(
                field="LAST_BOOT_TIME",
                operator="EQUAL",
                value=boot_time.AsSecondsSinceEpoch()))
    ])

    eq_flow = "Test flow for LAST_BOOT_TIME"

    rule.actions.Append(
        flow_name=eq_flow, argv=rdf_protodict.Dict(dict(foo="bar")))

    rule_set.Append(rule)

    # Assign it to the foreman.
    foreman_obj.Set(foreman_obj.Schema.RULES, rule_set)
    foreman_obj.Close()

    self.clients_launched = []
    foreman_obj.AssignTasksToClient("C.1000000000000011")
    foreman_obj.AssignTasksToClient("C.1000000000000012")
    foreman_obj.AssignTasksToClient("C.1000000000000013")
    foreman_obj.AssignTasksToClient("C.1000000000000014")

    # Make sure that the clients ran the correct flows.
    self.assertEqual(len(self.clients_launched), 4)
    self.assertEqual(self.clients_launched[0][0],
                     rdf_client.ClientURN("C.1000000000000011"))
    self.assertEqual(self.clients_launched[0][1], new_flow)
    self.assertEqual(self.clients_launched[1][0],
                     rdf_client.ClientURN("C.1000000000000012"))
    self.assertEqual(self.clients_launched[1][1], new_flow)
    self.assertEqual(self.clients_launched[2][0],
                     rdf_client.ClientURN("C.1000000000000013"))
    self.assertEqual(self.clients_launched[2][1], old_flow)
    self.assertEqual(self.clients_launched[3][0],
                     rdf_client.ClientURN("C.1000000000000014"))
    self.assertEqual(self.clients_launched[3][1], eq_flow)