Code example #1
File: approval_checks_test.py Project: qsdj/grr
    def testWhenAuthMgrActiveChecksApproversForEachClientLabel(self, mock_mgr):
        data_store.REL_DB.AddClientLabels(self.client.client_id, "GRR",
                                          ["foo", "bar"])

        approval_request = self._CreateRequest(grants=[
            rdf_objects.ApprovalGrant(grantor_username="******"),
            rdf_objects.ApprovalGrant(grantor_username="******")
        ])

        # Make sure approval manager is active.
        mock_mgr.IsActive.return_value = True

        approval_checks.CheckApprovalRequest(approval_request)

        self.assertEqual(len(mock_mgr.CheckApproversForLabel.mock_calls), 2)

        args = mock_mgr.CheckApproversForLabel.mock_calls[0][1]
        self.assertEqual(args,
                         (access_control.ACLToken(username="******"),
                          rdfvalue.RDFURN(self.client.client_id), "requestor",
                          set(["grantor1", "grantor2"]), "bar"))
        args = mock_mgr.CheckApproversForLabel.mock_calls[1][1]
        self.assertEqual(args,
                         (access_control.ACLToken(username="******"),
                          rdfvalue.RDFURN(self.client.client_id), "requestor",
                          set(["grantor1", "grantor2"]), "foo"))
Code example #2
File: gui_test_lib.py Project: qsdj/grr
    def CreateGenericHuntWithCollection(self, values=None):
        self.client_ids = self.SetupClients(10)

        CreateFileVersion(self.client_ids[0],
                          "fs/os/c/bin/bash",
                          token=self.token)

        if values is None:
            values = [
                rdfvalue.RDFURN("aff4:/sample/1"),
                rdfvalue.RDFURN("aff4:/%s/fs/os/c/bin/bash" %
                                self.client_ids[0].Basename()),
                rdfvalue.RDFURN("aff4:/sample/3")
            ]

        with implementation.GRRHunt.StartHunt(
                hunt_name=standard.GenericHunt.__name__,
                client_rule_set=self._CreateForemanClientRuleSet(),
                output_plugins=[],
                token=self.token) as hunt:

            runner = hunt.GetRunner()
            runner.Start()

            collection = hunt.ResultCollection()
            with data_store.DB.GetMutationPool() as pool:
                for value in values:
                    collection.Add(rdf_flows.GrrMessage(
                        payload=value, source=self.client_ids[0]),
                                   mutation_pool=pool)

            return hunt.urn
Code example #3
    def testRDFURN(self):
        """Test RDFURN handling."""
        # Make a url object
        str_url = "aff4:/hunts/W:AAAAAAAA/Results"
        url = rdfvalue.RDFURN(str_url, age=1)
        self.assertEqual(url.age, 1)
        self.assertEqual(url.Path(), "/hunts/W:AAAAAAAA/Results")
        self.assertEqual(str(url), str_url)
        self.assertEqual(url.scheme, "aff4")

        # Test the Add() function
        url = url.Add("some", age=2).Add("path", age=3)
        self.assertEqual(url.age, 3)
        self.assertEqual(url.Path(), "/hunts/W:AAAAAAAA/Results/some/path")
        self.assertEqual(str(url), utils.Join(str_url, "some", "path"))

        # Test that we can handle urns with a '?' and do not interpret them as
        # a delimiter between url and parameter list.
        str_url = "aff4:/C.0000000000000000/fs/os/c/regex.*?]&[+{}--"
        url = rdfvalue.RDFURN(str_url, age=1)
        self.assertEqual(url.Path(), str_url[5:])

        # Some more special characters...
        for path in ["aff4:/test/?#asd", "aff4:/test/#asd", "aff4:/test/?#"]:
            self.assertEqual(path, str(rdfvalue.RDFURN(path)))
Code example #4
File: flow_test.py Project: rainser/grr
    def testFlowStoresResultsPerType(self):
        flow_urn = flow_test_lib.TestFlowHelper(
            FlowWithMultipleResultTypes.__name__,
            action_mocks.ActionMock(),
            token=self.token,
            client_id=self.client_id)

        c = flow.GRRFlow.TypedResultCollectionForFID(flow_urn)
        self.assertEqual(
            set(c.ListStoredTypes()),
            set([
                rdfvalue.RDFInteger.__name__, rdfvalue.RDFString.__name__,
                rdfvalue.RDFURN.__name__
            ]))
        self.assertEqual(c.LengthByType(rdfvalue.RDFInteger.__name__), 1)
        self.assertEqual(c.LengthByType(rdfvalue.RDFString.__name__), 2)
        self.assertEqual(c.LengthByType(rdfvalue.RDFURN.__name__), 3)

        self.assertListEqual(
            [v.payload for _, v in c.ScanByType(rdfvalue.RDFInteger.__name__)],
            [rdfvalue.RDFInteger(42)])
        self.assertListEqual(
            [v.payload for _, v in c.ScanByType(rdfvalue.RDFString.__name__)],
            [rdfvalue.RDFString("foo bar"),
             rdfvalue.RDFString("foo1 bar1")])
        self.assertListEqual(
            [v.payload for _, v in c.ScanByType(rdfvalue.RDFURN.__name__)], [
                rdfvalue.RDFURN("foo/bar"),
                rdfvalue.RDFURN("foo1/bar1"),
                rdfvalue.RDFURN("foo2/bar2")
            ])
Code example #5
    def testDuplicatedAddDatastore(self):
        sources = self.sources

        self.assertTrue(
            sources.AddDatastore(rdfvalue.RDFURN("aff4:/artifacts")))
        self.assertFalse(
            sources.AddDatastore(rdfvalue.RDFURN("aff4:/artifacts")))
Code example #6
  def testAFF4Path(self):
    """Test the pathspec to URN conversion function."""
    pathspec = rdf_paths.PathSpec(
        path="\\\\.\\Volume{1234}\\",
        pathtype=rdf_paths.PathSpec.PathType.OS,
        mount_point="/c:/").Append(
            path="/windows", pathtype=rdf_paths.PathSpec.PathType.TSK)

    urn = pathspec.AFF4Path(rdf_client.ClientURN("C.1234567812345678"))
    self.assertEqual(
        urn,
        rdfvalue.RDFURN(
            r"aff4:/C.1234567812345678/fs/tsk/\\.\Volume{1234}\/windows"))

    # Test an ADS
    pathspec = rdf_paths.PathSpec(
        path="\\\\.\\Volume{1234}\\",
        pathtype=rdf_paths.PathSpec.PathType.OS,
        mount_point="/c:/").Append(
            pathtype=rdf_paths.PathSpec.PathType.TSK,
            path="/Test Directory/notes.txt:ads",
            inode=66,
            ntfs_type=128,
            ntfs_id=2)

    urn = pathspec.AFF4Path(rdf_client.ClientURN("C.1234567812345678"))
    self.assertEqual(
        urn,
        rdfvalue.RDFURN(r"aff4:/C.1234567812345678/fs/tsk/\\.\Volume{1234}\/"
                        "Test Directory/notes.txt:ads"))
Code example #7
    def _GetRemotePublicKey(self, common_name):
        try:
            # See if we have this client already cached.
            remote_key = self.pub_key_cache.Get(str(common_name))
            stats.STATS.IncrementCounter("grr_pub_key_cache", fields=["hits"])
            return remote_key
        except KeyError:
            stats.STATS.IncrementCounter("grr_pub_key_cache",
                                         fields=["misses"])

        # Fetch the client's cert and extract the key.
        client = aff4.FACTORY.Create(common_name,
                                     aff4.AFF4Object.classes["VFSGRRClient"],
                                     mode="rw",
                                     token=self.token)
        cert = client.Get(client.Schema.CERT)
        if not cert:
            stats.STATS.IncrementCounter("grr_unique_clients")
            raise communicator.UnknownClientCert("Cert not found")

        if rdfvalue.RDFURN(cert.GetCN()) != rdfvalue.RDFURN(common_name):
            logging.error("Stored cert mismatch for %s", common_name)
            raise communicator.UnknownClientCert("Stored cert mismatch")

        self.client_cache.Put(common_name, client)
        stats.STATS.SetGaugeValue("grr_frontendserver_client_cache_size",
                                  len(self.client_cache))

        pub_key = cert.GetPublicKey()
        self.pub_key_cache.Put(common_name, pub_key)
        return pub_key
Code example #8
File: flow_test.py Project: rainser/grr
    def End(self, responses):
        self.SendReply(rdfvalue.RDFInteger(42))
        self.SendReply(rdfvalue.RDFString("foo bar"))
        self.SendReply(rdfvalue.RDFString("foo1 bar1"))
        self.SendReply(rdfvalue.RDFURN("aff4:/foo/bar"))
        self.SendReply(rdfvalue.RDFURN("aff4:/foo1/bar1"))
        self.SendReply(rdfvalue.RDFURN("aff4:/foo2/bar2"))
Code example #9
File: foreman.py Project: qsdj/grr
  def _CheckIfHuntTaskWasAssigned(self, client_id, hunt_id):
    """Will return True if hunt's task was assigned to this client before."""
    client_urn = rdfvalue.RDFURN(client_id)
    for _ in aff4.FACTORY.Stat([
        client_urn.Add("flows/%s:hunt" % rdfvalue.RDFURN(hunt_id).Basename())
    ]):
      return True

    return False
Code example #10
    def testHashing(self):

        m = {}
        urn1 = rdfvalue.RDFURN("aff4:/test1")
        urn2 = rdfvalue.RDFURN("aff4:/test2")

        m[urn1] = 1
        self.assertIn(urn1, m)
        self.assertNotIn(urn2, m)
Code example #11
    def testGetDatastores(self):
        sources = self.sources

        self.assertTrue(sources.AddDatastore(rdfvalue.RDFURN("aff4:/foos")))
        self.assertTrue(sources.AddDatastore(rdfvalue.RDFURN("aff4:/bars")))

        datastores = list(sources.GetDatastores())
        self.assertIn(rdfvalue.RDFURN("aff4:/foos"), datastores)
        self.assertIn(rdfvalue.RDFURN("aff4:/bars"), datastores)
Code example #12
    def setUp(self):
        super(ApplyPluginToMultiTypeCollectionTest, self).setUp()
        self.plugin = test_plugins.TestInstantOutputPlugin(
            source_urn=rdfvalue.RDFURN("aff4:/foo/bar"), token=self.token)

        self.client_id = self.SetupClient(0)
        self.pool = data_store.DB.GetMutationPool()
        self.collection = multi_type_collection.MultiTypeCollection(
            rdfvalue.RDFURN("aff4:/mt_collection/testAddScan"))
Code example #13
File: flows_test.py Project: rainser/grr
    def testBadStructure(self):
        self.assertRaises(rdfvalue.InitializeError, rdfvalue.SessionID,
                          rdfvalue.RDFURN("aff4:/flows/A:123456:1:"))
        self.assertRaises(rdfvalue.InitializeError, rdfvalue.SessionID,
                          rdfvalue.RDFURN("aff4:/flows/A:123456::"))
        self.assertRaises(rdfvalue.InitializeError, rdfvalue.SessionID,
                          rdfvalue.RDFURN("aff4:/flows/A:123456:"))
        self.assertRaises(rdfvalue.InitializeError, rdfvalue.SessionID,
                          rdfvalue.RDFURN("aff4:/flows/A:"))
        self.assertRaises(rdfvalue.InitializeError, rdfvalue.SessionID,
                          rdfvalue.RDFURN("aff4:/flows/:"))
Code example #14
File: objects.py Project: qsdj/grr
    def ToURN(self):
        """Converts a reference into an URN."""

        if self.path_type in [PathInfo.PathType.OS, PathInfo.PathType.TSK]:
            return rdfvalue.RDFURN(self.client_id).Add("fs").Add(
                self.path_type.name.lower()).Add("/".join(
                    self.path_components))
        elif self.path_type == PathInfo.PathType.REGISTRY:
            return rdfvalue.RDFURN(self.client_id).Add("registry").Add(
                "/".join(self.path_components))
        elif self.path_type == PathInfo.PathType.TEMP:
            return rdfvalue.RDFURN(self.client_id).Add("temp").Add("/".join(
                self.path_components))

        raise ValueError("Unsupported path type: %s" % self.path_type)
Code example #15
def ApprovalRevokeRaw(aff4_path, token):
    """Revokes an approval for a given token.

  This method requires raw datastore access to manipulate approvals directly.

  Args:
    aff4_path: The aff4_path or client id the approval should be revoked for.
    token: The token that should be revoked.
  """
    try:
        urn = rdf_client.ClientURN(aff4_path)
    except type_info.TypeValueError:
        urn = rdfvalue.RDFURN(aff4_path)

    approval_urn = aff4.ROOT_URN.Add("ACL").Add(urn.Path()).Add(
        token.username).Add(utils.EncodeReasonString(token.reason))

    super_token = access_control.ACLToken(username="******")
    super_token.supervisor = True

    approval_request = aff4.FACTORY.Open(approval_urn,
                                         mode="rw",
                                         token=super_token)
    approval_request.DeleteAttribute(approval_request.Schema.APPROVER)
    approval_request.Close()
Code example #16
File: objects_test.py Project: rainser/grr
    def testRegistryPathIsConvertedToURNCorrectly(self):
        v = rdf_objects.VfsFileReference(client_id=self.client_id,
                                         path_type="REGISTRY",
                                         path_components=["a", "b", "c"])
        self.assertEqual(
            v.ToURN(),
            rdfvalue.RDFURN("aff4:/%s/registry/a/b/c" % self.client_id))
Code example #17
File: security.py Project: rainser/grr
    def __init__(self,
                 reason=None,
                 subject_urn=None,
                 approver=None,
                 email_cc_address=None,
                 token=None):
        super(ApprovalRequestor, self).__init__()

        if not reason:
            raise ValueError("reason can't be empty.")
        self.reason = reason

        if not subject_urn:
            raise ValueError("subject_urn can't be empty.")
        self.subject_urn = rdfvalue.RDFURN(subject_urn)

        if not approver:
            raise ValueError("approver can't be empty.")
        self.approver = approver

        self.email_cc_address = email_cc_address

        if not token:
            raise ValueError("token can't be empty.")
        self.token = token
Code example #18
def UploadRaw(file_path, aff4_path, token=None):
    """Upload a file to the datastore."""
    full_path = rdfvalue.RDFURN(aff4_path).Add(os.path.basename(file_path))
    fd = aff4.FACTORY.Create(full_path, "AFF4Image", mode="w", token=token)
    fd.Write(open(file_path, "rb").read(1024 * 1024 * 30))
    fd.Close()
    return str(fd.urn)
Code example #19
    def _ScanAttribute(self,
                       subject_prefix,
                       attribute,
                       after_urn=None,
                       limit=None):
        subject_prefix = utils.SmartStr(rdfvalue.RDFURN(subject_prefix))
        if subject_prefix[-1] != "/":
            subject_prefix += "/"
        subject_prefix += "%"

        query = """
    SELECT aff4.value, aff4.timestamp, subjects.subject
      FROM aff4
      JOIN subjects ON aff4.subject_hash=subjects.hash
      JOIN (
            SELECT subject_hash, MAX(timestamp) timestamp
            FROM aff4
            JOIN subjects ON aff4.subject_hash=subjects.hash
            WHERE aff4.attribute_hash=unhex(md5(%s))
                  AND subjects.subject like %s
                  AND subjects.subject > %s
            GROUP BY subject_hash
            ) maxtime ON aff4.subject_hash=maxtime.subject_hash
                  AND aff4.timestamp=maxtime.timestamp
      WHERE aff4.attribute_hash=unhex(md5(%s))
      ORDER BY subjects.subject
    """
        args = [attribute, subject_prefix, after_urn, attribute]

        if limit:
            query += " LIMIT %s"
            args.append(limit)

        results, _ = self.ExecuteQuery(query, args)
        return results
Code example #20
    def testDownloadActionSizeLimitWithDownloadTruncatedPolicy(self):
        image_path = os.path.join(self.base_path, "test_img.dd")
        # Read a bit more than a typical chunk (600 * 1024).
        expected_size = 750 * 1024

        action = rdf_file_finder.FileFinderAction.Download(
            max_size=expected_size, oversized_file_policy="DOWNLOAD_TRUNCATED")

        results = self.RunFlow(paths=[image_path], action=action)

        urn = rdfvalue.RDFURN(self.client_id).Add("/fs/os").Add(image_path)
        blobimage = aff4.FACTORY.Open(urn, token=self.token)
        # Make sure a VFSBlobImage got written.
        self.assertTrue(isinstance(blobimage, aff4_grr.VFSBlobImage))

        self.assertEqual(len(blobimage), expected_size)
        data = blobimage.read(100 * expected_size)
        self.assertEqual(len(data), expected_size)

        expected_data = open(image_path, "rb").read(expected_size)
        self.assertEqual(data, expected_data)
        hash_obj = data_store_utils.GetFileHashEntry(blobimage)

        d = hashlib.sha1()
        d.update(expected_data)
        expected_hash = d.hexdigest()
        self.assertEqual(hash_obj.sha1, expected_hash)

        unused_flow, flow_reply = results[0]
        self.assertEqual(flow_reply.hash_entry.sha1, expected_hash)
Code example #21
    def testDownloadAndHashActionSizeLimitWithHashTruncatedPolicy(self):
        image_path = os.path.join(self.base_path, "test_img.dd")
        # Read a bit more than a typical chunk (600 * 1024).
        expected_size = 750 * 1024

        hash_action = rdf_file_finder.FileFinderAction.Hash(
            max_size=expected_size, oversized_file_policy="HASH_TRUNCATED")
        download_action = rdf_file_finder.FileFinderAction.Download(
            max_size=expected_size, oversized_file_policy="HASH_TRUNCATED")

        for action in [hash_action, download_action]:
            results = self.RunFlow(paths=[image_path], action=action)

            urn = rdfvalue.RDFURN(self.client_id).Add("/fs/os").Add(image_path)
            vfs_file = aff4.FACTORY.Open(urn, token=self.token)
            # Make sure just a VFSFile got written.
            self.assertTrue(isinstance(vfs_file, aff4_grr.VFSFile))

            expected_data = open(image_path, "rb").read(expected_size)
            d = hashlib.sha1()
            d.update(expected_data)
            expected_hash = d.hexdigest()

            hash_entry = data_store_utils.GetFileHashEntry(vfs_file)
            self.assertEqual(hash_entry.sha1, expected_hash)

            unused_flow, flow_reply = results[0]
            self.assertEqual(flow_reply.hash_entry.sha1, expected_hash)
Code example #22
    def StaticAdd(cls, queue_urn, rdf_value, mutation_pool=None):
        """Adds an rdf value the queue.

    Adds an rdf value to a queue. Does not require that the queue be locked, or
    even open. NOTE: The caller is responsible for ensuring that the queue
    exists and is of the correct type.

    Args:
      queue_urn: The urn of the queue to add to.

      rdf_value: The rdf value to add to the queue.

      mutation_pool: A MutationPool object to write to.

    Raises:
      ValueError: rdf_value has unexpected type.

    """
        if not isinstance(rdf_value, cls.rdf_type):
            raise ValueError(
                "This collection only accepts values of type %s." %
                cls.rdf_type.__name__)
        if mutation_pool is None:
            raise ValueError("Mutation pool can't be none.")

        timestamp = rdfvalue.RDFDatetime.Now().AsMicrosecondsSinceEpoch()

        if not isinstance(queue_urn, rdfvalue.RDFURN):
            queue_urn = rdfvalue.RDFURN(queue_urn)

        mutation_pool.QueueAddItem(queue_urn, rdf_value, timestamp)
Code example #23
    def CheckCronJobAccess(self, token, cron_job_urn):
        if not cron_job_urn:
            raise ValueError("Cron job urn can't be empty.")
        cron_job_urn = rdfvalue.RDFURN(cron_job_urn)

        return ValidateToken(token, [cron_job_urn]) and (
            token.supervisor or self._CheckApprovals(token, cron_job_urn))
Code example #24
File: hunt_regression_test.py Project: qsdj/grr
    def Run(self):
        hunt_urn = rdfvalue.RDFURN("aff4:/hunts/H:123456")
        results = implementation.GRRHunt.ResultCollectionForHID(hunt_urn)
        with data_store.DB.GetMutationPool() as pool:
            result = rdf_flows.GrrMessage(
                payload=rdfvalue.RDFString("blah1"),
                age=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(1))
            results.Add(result,
                        timestamp=result.age + rdfvalue.Duration("1s"),
                        mutation_pool=pool)

            result = rdf_flows.GrrMessage(
                payload=rdfvalue.RDFString("blah2-foo"),
                age=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(42))
            results.Add(result,
                        timestamp=result.age + rdfvalue.Duration("1s"),
                        mutation_pool=pool)

        self.Check("ListHuntResults",
                   args=hunt_plugin.ApiListHuntResultsArgs(hunt_id="H:123456"))
        self.Check("ListHuntResults",
                   args=hunt_plugin.ApiListHuntResultsArgs(hunt_id="H:123456",
                                                           count=1))
        self.Check("ListHuntResults",
                   args=hunt_plugin.ApiListHuntResultsArgs(hunt_id="H:123456",
                                                           offset=1,
                                                           count=1))
        self.Check("ListHuntResults",
                   args=hunt_plugin.ApiListHuntResultsArgs(hunt_id="H:123456",
                                                           filter="foo"))
Code example #25
def ApprovalCreateRaw(aff4_path,
                      reason="",
                      expire_in=60 * 60 * 24 * 7,
                      token=None,
                      approval_type="ClientApproval"):
    """Creates an approval with raw access.

  This method requires raw datastore access to manipulate approvals directly.
  This currently doesn't work for hunt or cron approvals, because they check
  that each approver has the admin label. Since the fake users don't exist, the
  check fails.

  Args:
    aff4_path: The aff4_path or client id the approval should be created for.
    reason: The reason to put in the token.
    expire_in: Expiry in seconds to use in the token.
    token: The token that will be used. If this is specified, reason and expiry
        are ignored.
    approval_type: The type of the approval to create.

  Returns:
    The token.

  Raises:
    RuntimeError: On bad token.
  """
    if approval_type in ["ClientApproval", security.ClientApproval]:
        urn = rdf_client.ClientURN(aff4_path)
    else:
        urn = rdfvalue.RDFURN(aff4_path)

    if not token:
        expiry = time.time() + expire_in
        token = access_control.ACLToken(reason=reason, expiry=expiry)

    if not token.reason:
        raise RuntimeError("Cannot create approval with empty reason")
    if not token.username:
        token.username = getpass.getuser()
    approval_urn = security.ApprovalRequestor.ApprovalUrnBuilder(
        urn.Path(), token.username, token.reason)
    super_token = access_control.ACLToken(username="******")
    super_token.supervisor = True

    if isinstance(approval_type, basestring):
        approval_type_cls = aff4.AFF4Object.classes[approval_type]
    else:
        approval_type_cls = approval_type

    approval_request = aff4.FACTORY.Create(approval_urn,
                                           approval_type_cls,
                                           mode="rw",
                                           token=super_token)

    # Add approvals indicating they were approved by fake "raw" mode users.
    approval_request.AddAttribute(
        approval_request.Schema.APPROVER("%s1-raw" % token.username))
    approval_request.AddAttribute(
        approval_request.Schema.APPROVER("%s-raw2" % token.username))
    approval_request.Close()
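
A hedged usage sketch tying ApprovalCreateRaw together with ApprovalRevokeRaw from example #15, based only on the signatures shown in this listing; the client id and reason string are placeholders:

    # Create a raw ClientApproval for a made-up test client, then revoke it.
    token = ApprovalCreateRaw("C.1000000000000000", reason="manual testing")
    ApprovalRevokeRaw("C.1000000000000000", token)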
Code example #26
    def testNotificationClaimsTimeout(self):
        collection_urn = rdfvalue.RDFURN(
            "aff4:/testNotificationClaimsTimeout/collection")
        with data_store.DB.GetMutationPool() as pool:
            for i in range(5):
                hunts_results.HuntResultCollection.StaticAdd(
                    collection_urn,
                    rdf_flows.GrrMessage(request_id=i),
                    mutation_pool=pool)

        results_1 = hunts_results.HuntResultQueue.ClaimNotificationsForCollection(
            token=self.token)
        self.assertEqual(5, len(results_1[1]))

        # Check that we have a claim - that another read returns nothing.
        results_2 = hunts_results.HuntResultQueue.ClaimNotificationsForCollection(
            token=self.token)
        self.assertEqual(0, len(results_2[1]))

        # Push time forward past the default claim timeout, then we should be able
        # to re-read (and re-claim).
        with test_lib.FakeTime(rdfvalue.RDFDatetime.Now() +
                               rdfvalue.Duration("45m")):
            results_3 = hunts_results.HuntResultQueue.ClaimNotificationsForCollection(
                token=self.token)
        self.assertEqual(results_3, results_1)
Code example #27
File: fake_data_store.py Project: rainser/grr
    def ScanAttributes(self,
                       subject_prefix,
                       attributes,
                       after_urn="",
                       max_records=None,
                       relaxed_order=False):
        subject_prefix = utils.SmartStr(rdfvalue.RDFURN(subject_prefix))
        if subject_prefix[-1] != "/":
            subject_prefix += "/"
        if after_urn:
            after_urn = utils.SmartUnicode(after_urn)
        subjects = []
        for s in self.subjects.keys():
            if s.startswith(subject_prefix) and s > after_urn:
                subjects.append(s)
        subjects.sort()

        return_count = 0
        for s in subjects:
            if max_records and return_count >= max_records:
                break
            r = self.subjects[s]
            results = {}
            for attribute in attributes:
                attribute_list = r.get(attribute)
                if attribute_list:
                    value, timestamp = attribute_list[-1]
                    results[attribute] = (timestamp, value)
            if results:
                return_count += 1
                yield (s, results)
Code example #28
def OpenClient(client_id=None, token=None):
    """Opens the client, getting potential approval tokens.

  Args:
    client_id: The client id that should be opened.
    token: Token to use to open the client

  Returns:
    Tuple containing (client, token) objects, or (None, None) if no
    appropriate approval tokens were found.
  """
    if not token:
        try:
            token = ApprovalFind(client_id, token=token)
        except access_control.UnauthorizedAccess as e:
            logging.debug("No authorization found for access to client: %s", e)

    try:
        # Try and open with the token we managed to retrieve or the default.
        client = aff4.FACTORY.Open(rdfvalue.RDFURN(client_id),
                                   mode="r",
                                   token=token)
        return client, token
    except access_control.UnauthorizedAccess:
        logging.warning(
            "Unable to find a valid reason for client %s. You may need "
            "to request approval.", client_id)
        return None, None
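
A hedged sketch of calling OpenClient as defined above; the client id is a placeholder:

    # Returns (None, None) when no usable approval token can be found.
    client, token = OpenClient(client_id="C.1000000000000000")
    if client is not None:
        print(client.urn)  # e.g. aff4:/C.1000000000000000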
Code example #29
    def GetReportData(self, get_report_args, token):
        """Extract only the operating system type from the active histogram."""
        ret = rdf_report_plugins.ApiReportData(
            representation_type=rdf_report_plugins.ApiReportData.
            RepresentationType.PIE_CHART)

        try:
            fd = aff4.FACTORY.Open(
                rdfvalue.RDFURN("aff4:/stats/ClientFleetStats").Add(
                    get_report_args.client_label),
                token=token)
            for graph in fd.Get(
                    aff4_stats.ClientFleetStats.SchemaCls.RELEASE_HISTOGRAM):
                # Find the correct graph and merge the OS categories together
                if "%s day" % self.__class__.ACTIVE_DAYS in graph.title:
                    for sample in graph:
                        ret.pie_chart.data.Append(
                            rdf_report_plugins.ApiReportDataPoint1D(
                                label=sample.label, x=sample.y_value))
                    break
        except (IOError, TypeError):
            pass

        ret.pie_chart.data = sorted(ret.pie_chart.data,
                                    key=lambda point: point.label)

        return ret
Code example #30
    def testGzExtension(self):
        with utils.Stubber(urllib2, "urlopen", FakeOpen):
            profile = self.server.GetProfileByName("pe")
            # We received compressed data.
            zlib.decompress(profile.data, 16 + zlib.MAX_WBITS)

            # We issued one http request.
            self.assertEqual(FakeHandle.read_count, 1)

            self.server.GetProfileByName("pe")

            # This time it should have been cached.
            self.assertEqual(FakeHandle.read_count, 1)

            self.server.GetProfileByName("pe")

            # This is the same profile.
            self.assertEqual(FakeHandle.read_count, 1)

        cache_urn = rdfvalue.RDFURN(config.CONFIG["Rekall.profile_cache_urn"])
        cached_items = list(
            aff4.FACTORY.Open(cache_urn.Add(
                server_stubs.REKALL_PROFILE_REPOSITORY_VERSION),
                              token=self.token).ListChildren())

        # We cache the .gz only.
        self.assertEqual(len(cached_items), 1)
        self.assertEqual(cached_items[0].Basename(), "pe")