Example #1
 def test_payloadresults_to_str(self):
     """str() of a PayloadResults object must produce valid JSON for a dict."""
     results = PayloadResults.from_payload(Payload(self.generic_content))
     as_text = str(results)
     self.assertIsInstance(as_text, str)
     # Round-trip through the JSON parser to confirm it is a JSON object.
     self.assertIsInstance(json.loads(as_text), dict)
Example #2
 def test_reconstruct_all_subresponses(self):
     """Reconstructing subresponses yields one response per payload as root.

     Simulates a scan of A.zip containing B.txt and C.zip, where C.zip
     itself contains D.txt.
     """
     def fake_result(payload_id, result, extracted_from=None):
         # Build a minimal PayloadResults with a single fake worker result.
         kwargs = dict(
             payload_id=payload_id,
             size=0,
             payload_meta=PayloadMeta(),
             workers=[{"fake": result}],
             plugins_run={"workers": [["fake"]]},
         )
         if extracted_from is not None:
             kwargs["extracted_from"] = extracted_from
             kwargs["extracted_by"] = "fake"
         return PayloadResults(**kwargs)

     initial_response = StoqResponse(
         results=[
             fake_result("A.zip", "result1"),
             fake_result("B.txt", "result2", extracted_from="A.zip"),
             fake_result("C.zip", "result3", extracted_from="A.zip"),
             fake_result("D.txt", "result4", extracted_from="C.zip"),
         ],
         request_meta=RequestMeta(extra_data={"check": "me"}),
         errors={},
     )
     s = Stoq(base_dir=utils.get_data_dir(), decorators=["simple_decorator"])
     subresponses = list(s.reconstruct_all_subresponses(initial_response))
     # One "artificial" response is generated per payload acting as root.
     self.assertEqual(len(subresponses), 4)
     # Root A.zip sees all 4 payloads; B.txt only itself; C.zip sees
     # itself plus D.txt; D.txt only itself.
     self.assertEqual([len(r.results) for r in subresponses], [4, 1, 2, 1])
     self.assertEqual(
         [r.results[0].workers[0]["fake"] for r in subresponses],
         ["result1", "result2", "result3", "result4"],
     )
     self.assertTrue(
         all("simple_decorator" in r.decorators for r in subresponses)
     )
     # Every reconstructed response must carry the same scan ID.
     self.assertEqual(len({r.scan_id for r in subresponses}), 1)
Example #3
    def _single_scan(
        self,
        payload: Payload,
        add_dispatch: List[str],
        add_deep_dispatch: List[str],
        request_meta: RequestMeta,
    ) -> Tuple[PayloadResults, List[Payload], DefaultDict[str, List[str]]]:
        """
        Scan a single payload with every dispatched worker plugin.

        Runs the normal ("1st round") dispatches first, then up to
        ``self.max_dispatch_passes`` rounds of deep dispatches.  Each round
        records which plugins ran in ``payload.plugins_run['workers']`` and
        their results in ``payload.worker_results`` (both are mutated in
        place, one list entry per round).  After scanning, if the request
        and the payload both allow archiving, the payload is handed to each
        loaded destination archiver.

        :param payload: The payload to scan; mutated in place as described.
        :param add_dispatch: Extra worker plugin names to dispatch.
        :param add_deep_dispatch: Extra worker plugin names to deep dispatch.
        :param request_meta: Metadata for the originating scan request.
        :return: Tuple of (results for this payload, payloads extracted by
            the workers, mapping of plugin name to error message strings).
        """

        # Payloads extracted by workers, to be scanned by the caller later.
        extracted = []
        # Errors keyed by the plugin name that produced them.
        errors: DefaultDict[str, List[str]] = defaultdict(list)
        dispatch_pass = 0

        dispatches, dispatch_errors = self._get_dispatches(
            payload, add_dispatch, request_meta)
        if dispatch_errors:
            errors = helpers.merge_dicts(errors, dispatch_errors)
        for plugin_name in dispatches:
            try:
                plugin = self.load_plugin(plugin_name)
            except Exception as e:
                msg = 'worker:failed to load'
                self.log.exception(msg)
                errors[plugin_name].append(helpers.format_exc(e, msg=msg))
                # A plugin that fails to load is skipped, not fatal.
                continue
            # Normal dispatches are the "1st round" of scanning
            payload.plugins_run['workers'][0].append(plugin_name)
            try:
                worker_response = plugin.scan(payload,
                                              request_meta)  # pyre-ignore[16]
            except Exception as e:
                msg = 'worker:failed to scan'
                self.log.exception(msg)
                errors[plugin_name].append(helpers.format_exc(e, msg=msg))
                continue
            # A worker may legitimately return nothing at all.
            if worker_response is None:
                continue
            if worker_response.results is not None:
                # Normal dispatches are the "1st round" of scanning
                payload.worker_results[0][
                    plugin_name] = worker_response.results
            # Wrap any extracted content in new Payloads that remember
            # which plugin and parent payload they came from.
            extracted.extend([
                Payload(ex.content, ex.payload_meta, plugin_name,
                        payload.payload_id) for ex in worker_response.extracted
            ])
            if worker_response.errors:
                errors[plugin_name].extend(worker_response.errors)

        # Deep dispatch rounds: repeat until no plugin wants another pass
        # or the configured maximum number of passes is reached.
        while dispatch_pass < self.max_dispatch_passes:
            dispatch_pass += 1
            deep_dispatches, deep_dispatch_errors = self._get_deep_dispatches(
                payload, add_deep_dispatch, request_meta)
            if deep_dispatch_errors:
                errors = helpers.merge_dicts(errors, deep_dispatch_errors)
            if deep_dispatches:
                # Add another entry for this round
                payload.plugins_run['workers'].append([])
                payload.worker_results.append({})
            else:
                break
            for plugin_name in deep_dispatches:
                try:
                    plugin = self.load_plugin(plugin_name)
                except Exception as e:
                    msg = f'deep dispatch:failed to load (pass {dispatch_pass}/{self.max_dispatch_passes})'
                    self.log.exception(msg)
                    errors[plugin_name].append(helpers.format_exc(e, msg=msg))
                    continue
                # Record this plugin under the entry for the current pass.
                payload.plugins_run['workers'][dispatch_pass].append(
                    plugin_name)
                try:
                    worker_response = plugin.scan(  # pyre-ignore[16]
                        payload, request_meta)
                except Exception as e:
                    msg = f'deep dispatch:failed to scan (pass {dispatch_pass}/{self.max_dispatch_passes})'
                    self.log.exception(msg)
                    errors[plugin_name].append(helpers.format_exc(e, msg=msg))
                    continue
                if worker_response is None:
                    continue
                if worker_response.results is not None:
                    payload.worker_results[dispatch_pass][
                        plugin_name] = worker_response.results
                extracted.extend([
                    Payload(ex.content, ex.payload_meta, plugin_name,
                            payload.payload_id)
                    for ex in worker_response.extracted
                ])
                if worker_response.errors:
                    errors[plugin_name].extend(worker_response.errors)

        # Snapshot the (now fully populated) payload into its results object
        # before archiving, so archiver results attach to it below.
        payload_results = PayloadResults.from_payload(payload)
        if request_meta.archive_payloads and payload.payload_meta.should_archive:
            for plugin_name, archiver in self._loaded_dest_archiver_plugins.items(
            ):
                payload.plugins_run['archivers'].append(plugin_name)
                try:
                    archiver_response = archiver.archive(payload, request_meta)
                except Exception as e:
                    msg = 'archiver:failed to archive'
                    self.log.exception(msg)
                    errors[plugin_name].append(helpers.format_exc(e, msg=msg))
                    continue
                if archiver_response is None:
                    continue
                if archiver_response.results is not None:
                    payload_results.archivers[
                        plugin_name] = archiver_response.results
                if archiver_response.errors:
                    errors[plugin_name].extend(archiver_response.errors)

        return (payload_results, extracted, errors)