def testDownloadsSingleHuntFileIfAuthorizationIsPresent(self):
  """Checks that clicking the download button reads the hunt result file."""
  hunt_urn = self._CreateHuntWithDownloadedFile()
  hunt_id = hunt_urn.Basename()
  results = self.GetHuntResults(hunt_urn)

  # Approval is granted up front, so the download is expected to succeed.
  self.RequestAndGrantHuntApproval(hunt_id)

  # Navigate to the Results tab of the hunt.
  self.Open("/")
  self.Click("css=a[grrtarget=hunts]")
  self.Click("css=td:contains('%s')" % hunt_id)
  self.Click("css=li[heading=Results]")

  # Open the backing stream for the first result, using whichever data
  # store implementation is active.
  if not data_store.RelationalDBEnabled():
    result_fd = aff4.FACTORY.Open(
        flow_export.CollectionItemToAff4Path(results[0]), token=self.token)
  else:
    result_fd = file_store.OpenFile(
        flow_export.CollectionItemToClientPath(results[0]))

  # Patch Read on the stream class so we can observe that the download
  # actually touched the file's contents.
  with mock.patch.object(result_fd.__class__, "Read") as read_patch:
    self.Click(
        "css=grr-results-collection button:has(span.glyphicon-download)")
    self.WaitUntil(lambda: read_patch.called)
def _ItemsToUrns(self, items):
  """Yields an AFF4 URN for every downloadable item in `items`.

  Items that are not exportable (flow_export.ItemNotExportableError)
  are silently skipped.
  """
  for collection_item in items:
    try:
      urn = flow_export.CollectionItemToAff4Path(collection_item,
                                                 self.client_id)
    except flow_export.ItemNotExportableError:
      continue
    yield urn
def Handle(self, args, token=None):
  """Streams the file referenced by a hunt result back to the caller.

  Scans the hunt's result collection near `args.timestamp` for an entry
  whose exported AFF4 path matches `args.client_id` + `args.vfs_path`,
  and returns the file's contents as a binary stream.

  Args:
    args: Request arguments; hunt_id, client_id, vfs_path and timestamp
      are all required.
    token: ACL token used to open the AFF4 stream.

  Returns:
    An api_call_handler_base.ApiBinaryStream with the file's contents.

  Raises:
    ValueError: If any of the required args fields is unset.
    HuntFileNotFoundError: If no readable matching file is found within
      MAX_RECORDS_TO_CHECK records of the given timestamp.
  """
  if not args.hunt_id:
    raise ValueError("hunt_id can't be None")

  if not args.client_id:
    raise ValueError("client_id can't be None")

  if not args.vfs_path:
    raise ValueError("vfs_path can't be None")

  if not args.timestamp:
    raise ValueError("timestamp can't be None")

  api_vfs.ValidateVfsPath(args.vfs_path)

  results = implementation.GRRHunt.ResultCollectionForHID(
      args.hunt_id.ToURN())

  expected_aff4_path = args.client_id.ToClientURN().Add(args.vfs_path)
  # Scan() returns records strictly after `after_timestamp`, so subtract
  # one microsecond to make the requested timestamp itself visible.
  # TODO(user): should after_timestamp be strictly less than the desired
  # timestamp.
  timestamp = rdfvalue.RDFDatetime(int(args.timestamp) - 1)

  # If the entry corresponding to a given path is not found within
  # MAX_RECORDS_TO_CHECK from a given timestamp, we report a 404.
  for _, item in results.Scan(
      after_timestamp=timestamp.AsMicrosecondsSinceEpoch(),
      max_records=self.MAX_RECORDS_TO_CHECK):
    try:
      # Do not pass the client id we got from the caller. This will
      # get filled automatically from the hunt results and we check
      # later that the aff4_path we get is the same as the one that
      # was requested.
      aff4_path = export.CollectionItemToAff4Path(item, client_id=None)
    except export.ItemNotExportableError:
      continue

    if aff4_path != expected_aff4_path:
      continue

    try:
      aff4_stream = aff4.FACTORY.Open(
          aff4_path, aff4_type=aff4.AFF4Stream, token=token)
      # A matching path with no content age means the file has no
      # content yet; give up rather than keep scanning (break, not
      # continue — the path match is unique).
      if not aff4_stream.GetContentAge():
        break

      return api_call_handler_base.ApiBinaryStream(
          "%s_%s" % (args.client_id, utils.SmartStr(aff4_path.Basename())),
          content_generator=self._GenerateFile(aff4_stream),
          content_length=len(aff4_stream))
    except aff4.InstantiationError:
      # The object exists but is not an AFF4Stream (e.g. overwritten by
      # a volume); treated as not found.
      break

  raise HuntFileNotFoundError(
      "File %s with timestamp %s and client %s "
      "wasn't found among the results of hunt %s" %
      (utils.SmartStr(args.vfs_path), utils.SmartStr(args.timestamp),
       utils.SmartStr(args.client_id), utils.SmartStr(args.hunt_id)))
def testDisplaysErrorMessageIfSingleHuntFileCanNotBeRead(self):
  """Verifies the UI shows an error when a result file is unreadable."""
  hunt_obj = self._CreateHuntWithDownloadedFile()
  collected = hunt_obj.ResultCollection()
  result_urn = flow_export.CollectionItemToAff4Path(collected[0])

  # Overwrite the result's path with a plain volume so that opening it
  # as a readable stream fails on download.
  with aff4.FACTORY.Create(
      result_urn, aff4_type=aff4.AFF4Volume, token=self.token) as _:
    pass

  self.RequestAndGrantHuntApproval(hunt_obj.urn.Basename())

  # Drive the UI to the hunt's Results tab and attempt the download.
  self.Open("/")
  self.Click("css=a[grrtarget=hunts]")
  self.Click("css=td:contains('GenericHunt')")
  self.Click("css=li[heading=Results]")
  self.Click("css=grr-results-collection button:has(span.glyphicon-download)")

  self.WaitUntil(self.IsTextPresent, "Couldn't download the file.")
def testDownloadsSingleHuntFileIfAuthorizationIsPresent(self):
  """Checks that an approved user can download a single hunt result file."""
  test_hunt = self._CreateHuntWithDownloadedFile()
  collected = test_hunt.ResultCollection()

  # Open the AFF4 stream backing the first result so its Read method can
  # be patched below.
  stream_fd = aff4.FACTORY.Open(
      flow_export.CollectionItemToAff4Path(collected[0]), token=self.token)

  self.RequestAndGrantHuntApproval(test_hunt.urn.Basename())

  # Navigate to the hunt's Results tab.
  self.Open("/")
  self.Click("css=a[grrtarget=hunts]")
  self.Click("css=td:contains('GenericHunt')")
  self.Click("css=li[heading=Results]")

  # The download is considered successful once the file's Read is hit.
  with mock.patch.object(stream_fd.__class__, "Read") as patched_read:
    self.Click(
        "css=grr-results-collection button:has(span.glyphicon-download)")
    self.WaitUntil(lambda: patched_read.called)
def Generate(self, items, token=None):
  """Generates archive from a given collection.

  Iterates the collection and generates an archive by yielding
  contents of every referenced AFF4Stream.

  Args:
    items: Iterable of rdf_client_fs.StatEntry objects
    token: User's ACLToken.

  Yields:
    Binary chunks comprising the generated archive.
  """
  del token  # unused, to be removed with AFF4 code

  client_ids = set()
  for item_batch in collection.Batch(items, self.BATCH_SIZE):

    fds_to_write = {}
    for item in item_batch:
      try:
        # Both the AFF4 URN (for archive-internal paths and error
        # reporting) and the client path (for the file store) are
        # derived from the same result item.
        urn = flow_export.CollectionItemToAff4Path(item, self.client_id)
        client_path = flow_export.CollectionItemToClientPath(
            item, self.client_id)
      except flow_export.ItemNotExportableError:
        continue

      fd = file_store.OpenFile(client_path)
      # total_files counts every exportable item, including ones the
      # predicate later rejects.
      self.total_files += 1

      if not self.predicate(client_path):
        self.ignored_files.append(utils.SmartUnicode(urn))
        continue

      client_ids.add(client_path.client_id)

      self.archived_files += 1

      # Make sure size of the original file is passed. It's required
      # when output_writer is StreamingTarWriter.
      st = os.stat_result((0o644, 0, 0, 0, 0, 0, fd.size, 0, 0, 0))
      fds_to_write[fd] = (client_path, urn, st)

    if fds_to_write:
      for fd, (client_path, urn, st) in iteritems(fds_to_write):
        try:
          # The file header is emitted lazily on the first chunk so
          # that files whose stream yields nothing produce no entry.
          for i, chunk in enumerate(
              file_store.StreamFilesChunks([client_path])):
            if i == 0:
              target_path = os.path.join(self.prefix, urn.Path()[1:])
              yield self.archive_generator.WriteFileHeader(
                  target_path, st=st)
            yield self.archive_generator.WriteFileChunk(chunk.data)
          yield self.archive_generator.WriteFileFooter()
        except Exception as exception:  # pylint: disable=broad-except
          # Keep going on per-file failures; record the failed URN and
          # undo the optimistic archived_files increment above.
          logging.exception(exception)
          self.archived_files -= 1
          self.failed_files.append(unicode(urn))

      if self.archive_generator.is_file_write_in_progress:
        # A mid-file failure above may have left a file entry open;
        # close it so the archive stays well-formed.
        yield self.archive_generator.WriteFileFooter()

  if client_ids:
    # Append a per-client metadata file for every client that
    # contributed at least one archived file.
    for client_id, client_info in iteritems(
        data_store.REL_DB.MultiReadClientFullInfo(client_ids)):
      client = api_client.ApiClient().InitFromClientInfo(client_info)
      for chunk in self._GenerateClientInfo(client_id, client):
        yield chunk

  for chunk in self._GenerateDescription():
    yield chunk

  yield self.archive_generator.Close()