Example #1
0
    def testRegexChangeNotification(self):
        """Test the AFF4RegexNotificationRule rule.

        Creates a notification rule whose CLIENT_PATH_REGEX is "b.*" and
        verifies that MockChangeEvent fires only for client paths matching the
        regex, and that the NOTIFY_ONLY_IF_NEW flag suppresses notifications
        for overwrites of existing files.
        """
        client_name = "C." + "0" * 16

        def _touch(path):
            # Create (or overwrite) an AFF4Object at the given path under the
            # client, which is what triggers notification rules.
            fd = aff4.FACTORY.Create(rdfvalue.ClientURN(client_name).Add(path),
                                     token=self.token,
                                     aff4_type="AFF4Object")
            fd.Close()

        def _run_worker():
            # Drain the mock worker so queued events are processed.
            while worker_mock.Next():
                pass

        # Create the notification rule.
        rule_fd = aff4.FACTORY.Create("aff4:/config/aff4_rules/new_rule",
                                      aff4_type="AFF4RegexNotificationRule",
                                      token=self.token)
        rule_fd.Set(rule_fd.Schema.CLIENT_PATH_REGEX("b.*"))
        rule_fd.Set(rule_fd.Schema.EVENT_NAME("MockChangeEvent"))
        rule_fd.Set(rule_fd.Schema.NOTIFY_ONLY_IF_NEW(0))
        rule_fd.Close()

        # Force notification rules to be reloaded.
        aff4.FACTORY.UpdateNotificationRules()

        _touch("a")
        worker_mock = test_lib.MockWorker(token=self.token)
        _run_worker()

        # No notifications are expected, because path doesn't match the regex.
        self.assertEqual(len(MockChangeEvent.CHANGED_URNS), 0)

        _touch("b")
        _run_worker()

        # Now we get a notification, because the path matches.
        self.assertEqual(len(MockChangeEvent.CHANGED_URNS), 1)
        self.assertEqual(MockChangeEvent.CHANGED_URNS[0],
                         rdfvalue.ClientURN(client_name).Add("b"))

        MockChangeEvent.CHANGED_URNS = []

        # Write again to the same file: NOTIFY_ONLY_IF_NEW is 0, so an
        # overwrite still produces a notification.
        _touch("b")
        _run_worker()

        self.assertEqual(len(MockChangeEvent.CHANGED_URNS), 1)
        self.assertEqual(MockChangeEvent.CHANGED_URNS[0],
                         rdfvalue.ClientURN(client_name).Add("b"))

        MockChangeEvent.CHANGED_URNS = []

        # Change the rule to notify only if file is written for the first time.
        rule_fd = aff4.FACTORY.Open("aff4:/config/aff4_rules/new_rule",
                                    mode="rw",
                                    token=self.token)
        rule_fd.Set(rule_fd.Schema.NOTIFY_ONLY_IF_NEW, rdfvalue.RDFInteger(1))
        rule_fd.Close()

        # Force update of the rules in the factory.
        aff4.FACTORY.UpdateNotificationRules()

        # Check that we don't get a notification for overwriting existing file.
        _touch("b")
        _run_worker()

        self.assertEqual(len(MockChangeEvent.CHANGED_URNS), 0)

        # Check that we do get a notification for writing a new file.
        _touch("b2")
        _run_worker()

        self.assertEqual(len(MockChangeEvent.CHANGED_URNS), 1)
        self.assertEqual(MockChangeEvent.CHANGED_URNS[0],
                         rdfvalue.ClientURN(client_name).Add("b2"))
Example #2
0
  def testMessageHandlerRequestLeasing(self):
    """Requests are leased in batches and become re-leasable on expiry."""
    requests = []
    for i in range(10):
      requests.append(
          rdf_objects.MessageHandlerRequest(
              client_id="C.1000000000000000",
              handler_name="Testhandler",
              request_id=i * 100,
              request=rdfvalue.RDFInteger(i)))
    lease_time = rdfvalue.Duration("5m")

    with test_lib.FakeTime(rdfvalue.RDFDatetime.FromSecondsSinceEpoch(10000)):
      self.db.WriteMessageHandlerRequests(requests)

    # Lease the first batch of 5 at t0.
    t0 = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(100000)
    with test_lib.FakeTime(t0):
      t0_expiry = t0 + lease_time
      batch = self.db.LeaseMessageHandlerRequests(
          lease_time=lease_time, limit=5)

      self.assertEqual(len(batch), 5)

      for req in batch:
        self.assertEqual(req.leased_until, t0_expiry)
        self.assertEqual(req.leased_by, utils.ProcessIdString())

    # Lease the remaining 5 at t1.
    t1 = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(100000 + 100)
    with test_lib.FakeTime(t1):
      t1_expiry = t1 + lease_time
      batch = self.db.LeaseMessageHandlerRequests(
          lease_time=lease_time, limit=5)

      self.assertEqual(len(batch), 5)

      for req in batch:
        self.assertEqual(req.leased_until, t1_expiry)
        self.assertEqual(req.leased_by, utils.ProcessIdString())

      # Nothing left to lease.
      batch = self.db.LeaseMessageHandlerRequests(
          lease_time=lease_time, limit=2)

      self.assertEqual(len(batch), 0)

    # All 10 requests are still readable and carry lease metadata.
    read = self.db.ReadMessageHandlerRequests()

    self.assertEqual(len(read), 10)
    for r in read:
      self.assertEqual(r.leased_by, utils.ProcessIdString())

    expiries = [r.leased_until for r in read]
    self.assertEqual(expiries.count(t0_expiry), 5)
    self.assertEqual(expiries.count(t1_expiry), 5)

    # Half the leases expired.
    t2 = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(100000 + 350)
    with test_lib.FakeTime(t2):
      batch = self.db.LeaseMessageHandlerRequests(lease_time=lease_time)

      self.assertEqual(len(batch), 5)

    # All of them expired.
    t3 = rdfvalue.RDFDatetime.FromSecondsSinceEpoch(100000 + 10350)
    with test_lib.FakeTime(t3):
      batch = self.db.LeaseMessageHandlerRequests(lease_time=lease_time)

      self.assertEqual(len(batch), 10)
Example #3
0
 def Flush(self):
     """Persist the last-chunk marker if modified, then run the base Flush."""
     if self._dirty:
         last_chunk_value = rdfvalue.RDFInteger(self.last_chunk)
         self.Set(self.Schema.LAST_CHUNK, last_chunk_value)
     super(AFF4SparseImage, self).Flush()
Example #4
0
 def Flush(self, sync=True):
   """Persist the last-chunk marker if modified, then flush via the base class."""
   if self._dirty:
     last_chunk_value = rdfvalue.RDFInteger(self.last_chunk)
     self.Set(self.Schema.LAST_CHUNK, last_chunk_value)
   super(AFF4SparseIndex, self).Flush(sync=sync)
Example #5
0
    def testReceiveMessagesFleetspeak(self):
        """Messages received through the Fleetspeak frontend are stored.

        Wraps num_msgs GrrMessages in Fleetspeak messages, pushes them through
        GRRFSServer.Process, and verifies they were stored as responses for
        request 1 of the test flow with auth_state and source filled in by the
        frontend.
        """
        service_name = "GRR"
        fake_service_client = _FakeGRPCServiceClient(service_name)

        # Point the global fleetspeak connector at the fake service client.
        fleetspeak_connector.Reset()
        fleetspeak_connector.Init(service_client=fake_service_client)

        fsd = fs_frontend_tool.GRRFSServer()

        grr_client_nr = 0xab
        grr_client_id_urn = self.SetupClient(grr_client_nr)

        flow_obj = self.FlowSetup(flow_test_lib.FlowOrderTest.__name__,
                                  grr_client_id_urn)

        num_msgs = 9

        session_id = flow_obj.session_id
        # One response per message, all addressed to request_id 1 of the flow.
        messages = [
            rdf_flows.GrrMessage(request_id=1,
                                 response_id=i,
                                 session_id=session_id,
                                 payload=rdfvalue.RDFInteger(i))
            for i in xrange(1, num_msgs + 1)
        ]

        # 8-byte Fleetspeak client id; the trailing \xab matches grr_client_nr,
        # as checked by the assertion against GRRIDToFleetspeakID below.
        fs_client_id = "\x10\x00\x00\x00\x00\x00\x00\xab"
        # fs_client_id should be equivalent to grr_client_id_urn
        self.assertEqual(
            fs_client_id,
            fleetspeak_utils.GRRIDToFleetspeakID(grr_client_id_urn.Basename()))

        fs_messages = [
            fs_common_pb2.Message(message_type="GrrMessage",
                                  source=fs_common_pb2.Address(
                                      client_id=fs_client_id,
                                      service_name=service_name))
            for _ in xrange(num_msgs)
        ]
        # Embed each GrrMessage proto into the data field of its Fleetspeak
        # wrapper message.
        for fs_message, message in itertools.izip(fs_messages, messages):
            fs_message.data.Pack(message.AsPrimitiveProto())

        for msg in fs_messages:
            fsd.Process(msg, None)

        # Make sure the task is still on the client queue
        manager = queue_manager.QueueManager(token=self.token)
        tasks_on_client_queue = manager.Query(grr_client_id_urn, 100)
        self.assertEqual(len(tasks_on_client_queue), 1)

        want_messages = [message.Copy() for message in messages]
        for want_message in want_messages:
            # This is filled in by the frontend as soon as it gets the message.
            want_message.auth_state = (
                rdf_flows.GrrMessage.AuthorizationState.AUTHENTICATED)
            want_message.source = grr_client_id_urn

        stored_messages = data_store.DB.ReadResponsesForRequestId(
            session_id, 1)

        self.assertEqual(len(stored_messages), len(want_messages))

        # Stored order is not guaranteed; sort by response_id before comparing.
        stored_messages.sort(key=lambda m: m.response_id)
        # Check that messages were stored correctly
        for stored_message, want_message in itertools.izip(
                stored_messages, want_messages):
            # Timestamps are assigned at storage time, so ignore them.
            stored_message.timestamp = None
            self.assertRDFValuesEqual(stored_message, want_message)
Example #6
0
 def testMultipliesAndIsMultipliedByByPrimitive(self):
     """RDFInteger supports * with a plain int on either side."""
     left_product = rdfvalue.RDFInteger(10) * 10
     self.assertEqual(left_product, 100)
     right_product = 10 * rdfvalue.RDFInteger(10)
     self.assertEqual(right_product, 100)
Example #7
0
 def testDividesAndIsDividableByPrimitiveInts(self):
     """RDFInteger supports / with a plain int on either side."""
     quotient = rdfvalue.RDFInteger(10) / 5
     self.assertEqual(quotient, 2)
     quotient = 100 / rdfvalue.RDFInteger(10)
     self.assertEqual(quotient, 10)
Example #8
0
 def testComparableToPrimiviteInts(self):
     """RDFInteger compares (==, <, >) against plain ints on either side."""
     value = rdfvalue.RDFInteger(10)
     self.assertEqual(value, 10)
     self.assertTrue(value > 5)
     self.assertTrue(15 > value)
     self.assertTrue(value < 15)
     self.assertTrue(5 < value)
Example #9
0
 def GenerateSample(self, number=0):
     """Return an RDFInteger sample wrapping `number` (defaults to 0)."""
     sample = rdfvalue.RDFInteger(number)
     return sample