Example #1
    def InitFromDatabaseObject(self, db_obj, approval_subject_obj=None):
        # Load the approval subject's client info if the caller didn't pass it.
        if not approval_subject_obj:
            approval_subject_obj = data_store.REL_DB.ReadClientFullInfo(
                db_obj.subject_id)
        # Expose the subject of the approval as an ApiClient.
        self.subject = api_client.ApiClient().InitFromClientInfo(
            approval_subject_obj)

        return _InitApiApprovalFromDatabaseObject(self, db_obj)
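
This method fills an approval object from its database row and looks up the subject client itself when the caller does not supply one. A minimal caller sketch, assuming a hypothetical `approval` instance of the class this method belongs to and a previously fetched `db_approval` record (both names are placeholders, not part of the example above):

    # Hypothetical sketch: `approval` and `db_approval` are placeholder names.
    api_approval = approval.InitFromDatabaseObject(db_approval)
    # The subject is now an ApiClient, so client fields are directly readable.
    print(api_approval.subject.client_id)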
Example #2
    def Generate(self, items, token=None):
        """Generates archive from a given collection.

        Iterates the collection and generates an archive by yielding contents
        of every referenced AFF4Stream.

        Args:
          items: Iterable of rdf_client_fs.StatEntry objects
          token: User's ACLToken.

        Yields:
          Binary chunks comprising the generated archive.
        """

        del token  # unused, to be removed with AFF4 code

        client_ids = set()
        for item_batch in collection.Batch(items, self.BATCH_SIZE):

            client_paths = set()
            for item in item_batch:
                try:
                    client_path = flow_export.CollectionItemToClientPath(
                        item, self.client_id)
                except flow_export.ItemNotExportableError:
                    continue

                if not self.predicate(client_path):
                    self.ignored_files.add(client_path)
                    self.processed_files.add(client_path)
                    continue

                client_ids.add(client_path.client_id)
                client_paths.add(client_path)

            for chunk in file_store.StreamFilesChunks(client_paths):
                self.processed_files.add(chunk.client_path)
                for output in self._WriteFileChunk(chunk=chunk):
                    yield output

            self.processed_files |= client_paths - (self.ignored_files
                                                    | self.archived_files)

        if client_ids:
            for client_id, client_info in iteritems(
                    data_store.REL_DB.MultiReadClientFullInfo(client_ids)):
                client = api_client.ApiClient().InitFromClientInfo(client_info)
                for chunk in self._GenerateClientInfo(client_id, client):
                    yield chunk

        for chunk in self._GenerateDescription():
            yield chunk

        yield self.archive_generator.Close()
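
Generate is a generator, so callers stream the archive instead of building it in memory. A minimal consumer sketch, assuming a hypothetical `generator` instance of the class above and an iterable `stat_entries` of rdf_client_fs.StatEntry objects (both names are placeholders, not part of the example):

    # Hypothetical sketch: `generator` and `stat_entries` are placeholder names.
    # Each yielded value is a binary chunk of the archive, written out as-is.
    with open("/tmp/collection_archive.zip", "wb") as out:
        for chunk in generator.Generate(stat_entries):
            out.write(chunk)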
Example #3
 def testUpdateClientsFromFleetspeak(self):
     client_id_1 = client_plugin.ApiClientId("C." + "1" * 16)
     client_id_2 = client_plugin.ApiClientId("C." + "2" * 16)
     client_id_3 = client_plugin.ApiClientId("C." + "3" * 16)
     clients = [
         client_plugin.ApiClient(client_id=client_id_1,
                                 fleetspeak_enabled=True),
         client_plugin.ApiClient(client_id=client_id_2,
                                 fleetspeak_enabled=True),
         client_plugin.ApiClient(client_id=client_id_3,
                                 fleetspeak_enabled=False),
     ]
     # Fake Fleetspeak admin connection that only knows about clients 1 and 2.
     conn = mock.MagicMock()
     conn.outgoing.ListClients.return_value = admin_pb2.ListClientsResponse(
         clients=[
             admin_pb2.Client(
                 client_id=fleetspeak_utils.GRRIDToFleetspeakID(
                     client_id_1),
                 last_contact_time=TSProtoFromString(
                     "2018-01-01T00:00:01Z"),
                 last_clock=TSProtoFromString("2018-01-01T00:00:02Z")),
             admin_pb2.Client(
                 client_id=fleetspeak_utils.GRRIDToFleetspeakID(
                     client_id_2),
                 last_contact_time=TSProtoFromString(
                     "2018-01-02T00:00:01Z"),
                 last_clock=TSProtoFromString("2018-01-02T00:00:02Z"))
         ])
     with mock.patch.object(fleetspeak_connector, "CONN", conn):
         client_plugin.UpdateClientsFromFleetspeak(clients)
     # Clients 1 and 2 get Fleetspeak timestamps; client 3 is not
     # Fleetspeak-enabled and must be left untouched.
     self.assertEqual(clients, [
         client_plugin.ApiClient(
             client_id=client_id_1,
             fleetspeak_enabled=True,
             last_seen_at=rdfvalue.RDFDatetime.FromHumanReadable(
                 "2018-01-01T00:00:01Z"),
             last_clock=rdfvalue.RDFDatetime.FromHumanReadable(
                 "2018-01-01T00:00:02Z")),
         client_plugin.ApiClient(
             client_id=client_id_2,
             fleetspeak_enabled=True,
             last_seen_at=rdfvalue.RDFDatetime.FromHumanReadable(
                 "2018-01-02T00:00:01Z"),
             last_clock=rdfvalue.RDFDatetime.FromHumanReadable(
                 "2018-01-02T00:00:02Z")),
         client_plugin.ApiClient(client_id=client_id_3,
                                 fleetspeak_enabled=False),
     ])
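
Outside the test, the pattern being verified is straightforward: build ApiClient stubs and let UpdateClientsFromFleetspeak fill in last_seen_at and last_clock for the Fleetspeak-enabled ones. A minimal sketch using only names from the test; in a real deployment fleetspeak_connector.CONN must already be initialized instead of being patched with a mock:

    # Sketch of the call pattern the test exercises; the client ID is illustrative.
    clients = [
        client_plugin.ApiClient(
            client_id=client_plugin.ApiClientId("C." + "1" * 16),
            fleetspeak_enabled=True),
    ]
    client_plugin.UpdateClientsFromFleetspeak(clients)
    # Fleetspeak-enabled entries now carry last_seen_at and last_clock.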
Example #4
 def _CreateApiClient(self, platform):
     client_snapshot = self._SetupTestClientObject(0, system=platform)
     api_client = plugin_client.ApiClient()
     api_client.InitFromClientObject(client_snapshot)
     return api_client
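
The helper wraps the two steps of creating a test client snapshot for a given platform and converting it into an ApiClient. A hypothetical test method using it (the test name and assertion are illustrative, not from the source):

    # Hypothetical usage; asserts only that a populated client object came back.
    def testCreatesApiClientForPlatform(self):
        api_client = self._CreateApiClient("Linux")
        self.assertTrue(api_client.client_id)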
Example #5
    def Generate(self, items, token=None):
        """Generates archive from a given collection.

        Iterates the collection and generates an archive by yielding contents
        of every referenced AFF4Stream.

        Args:
          items: Iterable of rdf_client_fs.StatEntry objects
          token: User's ACLToken.

        Yields:
          Binary chunks comprising the generated archive.
        """

        del token  # unused, to be removed with AFF4 code

        client_ids = set()
        for item_batch in collection.Batch(items, self.BATCH_SIZE):

            fds_to_write = {}
            for item in item_batch:
                try:
                    urn = flow_export.CollectionItemToAff4Path(
                        item, self.client_id)
                    client_path = flow_export.CollectionItemToClientPath(
                        item, self.client_id)
                except flow_export.ItemNotExportableError:
                    continue

                fd = file_store.OpenFile(client_path)
                self.total_files += 1

                if not self.predicate(client_path):
                    self.ignored_files.append(utils.SmartUnicode(urn))
                    continue

                client_ids.add(client_path.client_id)

                # content_path = os.path.join(self.prefix, *urn_components)
                self.archived_files += 1

                # Make sure size of the original file is passed. It's required
                # when output_writer is StreamingTarWriter.
                st = os.stat_result((0o644, 0, 0, 0, 0, 0, fd.size, 0, 0, 0))
                fds_to_write[fd] = (client_path, urn, st)

            if fds_to_write:
                for fd, (client_path, urn, st) in iteritems(fds_to_write):
                    try:
                        for i, chunk in enumerate(
                                file_store.StreamFilesChunks([client_path])):
                            if i == 0:
                                target_path = os.path.join(
                                    self.prefix,
                                    urn.Path()[1:])
                                yield self.archive_generator.WriteFileHeader(
                                    target_path, st=st)

                            yield self.archive_generator.WriteFileChunk(
                                chunk.data)

                        yield self.archive_generator.WriteFileFooter()
                    except Exception as exception:  # pylint: disable=broad-except
                        logging.exception(exception)

                        self.archived_files -= 1
                        self.failed_files.append(unicode(urn))

                if self.archive_generator.is_file_write_in_progress:
                    yield self.archive_generator.WriteFileFooter()

        if client_ids:
            for client_id, client_info in iteritems(
                    data_store.REL_DB.MultiReadClientFullInfo(client_ids)):
                client = api_client.ApiClient().InitFromClientInfo(client_info)
                for chunk in self._GenerateClientInfo(client_id, client):
                    yield chunk

        for chunk in self._GenerateDescription():
            yield chunk

        yield self.archive_generator.Close()
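
This older variant opens each file up front, passes its size through an os.stat_result so a StreamingTarWriter can emit correct headers, and maintains the total_files, archived_files, ignored_files, and failed_files counters while streaming. A minimal sketch of driving it and reporting those counters afterwards; `generator` and `stat_entries` are placeholder names as in Example #2:

    # Hypothetical sketch: `generator` and `stat_entries` are placeholder names.
    with open("/tmp/collection_archive.tar", "wb") as out:
        for chunk in generator.Generate(stat_entries):
            out.write(chunk)
    # The counters are only final once the generator has been fully consumed.
    logging.info("archived %d of %d files (%d failed, %d ignored)",
                 generator.archived_files, generator.total_files,
                 len(generator.failed_files), len(generator.ignored_files))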