def save(self, response: StoqResponse) -> None:
    """
    Save results or ArchiverResponse to Kafka

    Either the full `StoqResponse` will be saved to the queue, or each
    individual payload that was archived by another archiver plugin. If it
    is an archived payload from a separate plugin, only the metadata
    produced from the archiver plugin will be sent to the queue, not the
    payload itself.
    """
    self._connect()
    if not self.publish_archive:
        # Publish the full scan response as a single message
        self.producer.send(self.topic, str(response).encode())
        self.producer.flush()
        return
    request_meta = {'request_meta': response.request_meta}
    for result in response.results:
        for archiver_name, archiver_meta in result.archivers.items():
            # Merge the archiver's metadata with the payload's original
            # metadata (archiver values win on key collisions).
            merged = dict(
                ChainMap(
                    archiver_meta,
                    result.payload_meta.extra_data,
                    request_meta,
                )
            )
            self.producer.send(
                self.topic, helpers.dumps({archiver_name: merged}).encode()
            )
    self.producer.flush()
async def save(self, response: StoqResponse) -> None:
    """
    Save response as Azure Blob Storage

    The response is serialized and uploaded to the configured results
    container, keyed by the response's `scan_id`.
    """
    blob_client: BlobClient = BlobClient.from_connection_string(
        conn_str=self.conn_str,
        container_name=self.results_container,
        blob_name=response.scan_id,
    )
    # Use the async context manager so the underlying transport is closed
    # even when upload_blob raises; the original only closed on success.
    async with blob_client:
        await blob_client.upload_blob(dumps(response))
def archive(
    self, payload: Payload, request_meta: RequestMeta
) -> Optional[ArchiverResponse]:
    """
    Archive Payload object to Kafka queue

    The raw payload content is base64 encoded before serialization;
    `helpers.dumps` decodes bytes as UTF-8 text, so arbitrary binary
    content would otherwise be corrupted or raise UnicodeDecodeError.
    NOTE(review): consumers of this topic must base64-decode `_content`.
    """
    self._connect()
    msg = {
        '_is_payload': True,
        # b64encode keeps binary content JSON-safe, consistent with the
        # other Kafka archive() implementation in this project.
        '_content': b64encode(payload.content),
        '_payload_meta': payload.payload_meta.extra_data,
        '_request_meta': request_meta,
    }
    self.producer.send(self.topic, helpers.dumps(msg).encode())
    self.producer.flush()
    return ArchiverResponse()
async def save(self, response: StoqResponse) -> None:
    """
    Save results or ArchiverResponse to redis

    When `publish_archive` is set, one queue entry is pushed per archiver
    result; otherwise the full response is stored under its scan_id.
    """
    if self.publish_archive:
        # Original built a throwaway intermediate list (and a dead
        # `msgs = []` initializer); iterate the archiver items directly.
        for result in response.results:
            for archiver_name, archiver_meta in result.archivers.items():
                self.conn.rpush(
                    self.redis_queue, dumps({archiver_name: archiver_meta})
                )
    else:
        self.conn.set(response.scan_id, str(response))
def test_dumps_types(self):
    """Verify helpers.dumps serializes varied types without raising."""
    samples = (
        [1, 2, 3],
        {'a': 1, 'b': 2},
        {1, 2, 3},
        datetime.now(),
        b'somebytes',
        ClassWithAttrs(),
    )
    for sample in samples:
        # Each serialization must succeed and produce truthy content
        self.assertTrue(helpers.dumps(sample))
def save(self, response: StoqResponse) -> None:
    """
    Save results or ArchiverResponse to Kafka

    When `publish_archive` is set, one message is sent per archiver
    result; otherwise the full response is sent as a single message.
    """
    self._connect()
    if self.publish_archive:
        # Original declared `msgs: List[str] = []` — a dead assignment
        # with a wrong annotation (the elements were dicts). Iterate the
        # archiver items directly instead.
        for result in response.results:
            for archiver_name, archiver_meta in result.archivers.items():
                msg = {archiver_name: archiver_meta}
                self.producer.send(self.topic, helpers.dumps(msg).encode())
    else:
        self.producer.send(self.topic, str(response).encode())
    self.producer.flush()
def archive(
    self, payload: Payload, request_meta: RequestMeta
) -> Optional[ArchiverResponse]:
    """
    Archive Payload object to Kafka queue
    """
    self._connect()
    # Base64 encode the raw content so it serializes cleanly via JSON
    message = {
        '_is_payload': True,
        '_content': b64encode(payload.content),
        '_payload_meta': payload.payload_meta.extra_data,
        '_request_meta': request_meta,
    }
    encoded = helpers.dumps(message).encode()
    self.producer.send(self.topic, encoded)
    self.producer.flush()
    return ArchiverResponse()
def __str__(self) -> str:
    """Return this object serialized via helpers.dumps."""
    return helpers.dumps(self)
def test_dumps_compactly(self):
    """helpers.dumps with compactly=True emits single-line JSON."""
    result = helpers.dumps({'a': 1, 'b': 2}, compactly=True)
    self.assertEqual(result, '{"a": 1, "b": 2}')
def test_dumps_unicode(self):
    """Byte values are decoded to unicode strings by helpers.dumps."""
    cases = [
        ({'key': b'value'}, '{"key": "value"}'),
        ({'key': b'hi\xe7\x8c\xab'}, '{"key": "hi猫"}'),
    ]
    for payload, expected in cases:
        self.assertEqual(helpers.dumps(payload, compactly=True), expected)