def run(self, flow_obj=None):
    """Run the named plugin and stream its output into the collection.

    Args:
      flow_obj: The flow whose status is updated with the id of the
        collection we are about to create. NOTE(review): despite the
        None default, this method dereferences flow_obj unconditionally —
        callers appear to always pass it.

    Raises:
      plugin.PluginError: If self.plugin does not name a known plugin.
    """
    self._text_renderer = text.TextRenderer(session=self._session)
    self._data_renderer = data_export.DataExportRenderer(
        session=self._session)
    self._uploaded_files = {}
    self._flow_obj = flow_obj

    # Make sure to notify the flow status about the collection we are about
    # to create.
    flow_obj.status.collection_ids.append(self.collection.id)

    # Find the plugin we need to call.
    plugin_cls = plugin.Command.ImplementationByClass(self.plugin)
    if plugin_cls is None:
        # BUG FIX: the original raised plugin_cls.PluginError here, which
        # is an AttributeError when plugin_cls is None. Raise the real
        # error type from the plugin module instead.
        raise plugin.PluginError("Unknown plugin")

    plugin_obj = plugin_cls(session=self._session, **self.args)

    # We need to define the columns for the output collection.
    columns = []
    for column in plugin_obj.table_header:
        columns.append(dict(name=column['name'], type="any"))

    # Add a new table to the collection.
    self.collection.tables.append(dict(name="data", columns=columns))

    # Render every row the plugin produces into the "data" table.
    with self.collection.start():
        for row in plugin_obj:
            self.collection.insert(table="data", **self.convert_row(row))
def GetSessionsAsJson(self):
    """Serialize every known session into a JSON-safe structure.

    Returns:
      A list with one JSON-safe encoding per session in
      self.session.session_list.
    """
    encoded_sessions = []
    for live_session in self.session.session_list:
        exporter = data_export.DataExportRenderer(session=live_session)

        # Look up the ObjectRenderer for this session type, instantiate
        # it, and use it to serialize the session.
        object_renderer_cls = renderer.ObjectRenderer.ForTarget(
            live_session, exporter)
        object_renderer = object_renderer_cls(
            session=live_session, renderer=exporter)

        encoded_sessions.append(object_renderer.EncodeToJsonSafe(live_session))

    return encoded_sessions
def testEncoderCache(self):
    """Check that hexdump rows survive GC of the source string's parts."""
    # Make the string long enough so that parts of it are garbage
    # collected. If the encoder uses id() to deduplicate it will fail since
    # id() might reuse across GCed objects.
    test_string = "this_is_a_very_long_sentence" * 10
    expected_parts = [
        test_string[offset:offset + 16]
        for offset in range(0, len(test_string), 16)
    ]

    with data_export.DataExportRenderer(
            session=self.session, output=io.StringIO()).start() as renderer:
        utils.WriteHexdump(renderer, test_string)

    # Collect the "data" field of every row record ("r") the renderer
    # emitted.
    actual_parts = [
        row[1]["data"] for row in renderer.data if row[0] == "r"
    ]

    self.assertEqual(actual_parts, expected_parts)
def __init__(self, collection, flow_obj, **kwargs):
    """Initialize the renderer over a collection and register a log table.

    Args:
      collection: The output collection this renderer writes into.
      flow_obj: The flow this renderer belongs to.
      **kwargs: Forwarded to the base renderer (must include session
        per the base class contract).
    """
    super(PluginRenderer, self).__init__(**kwargs)
    self.collection = collection
    self.current_section = "data"
    self.section_number = 0
    self._text_renderer = text.TextRenderer(session=self.session)
    self._data_renderer = data_export.DataExportRenderer(
        session=self.session)
    self._uploaded_files = {}
    self._flow_obj = flow_obj

    # Schema for the table that receives captured log messages.
    log_columns = [
        dict(name="timestamp", type="epoch"),
        dict(name="level", type="int"),
        dict(name="source"),
        dict(name="message"),
    ]
    self.collection.tables.append(dict(name="logs", columns=log_columns))

    # Capture log records emitted while this renderer is active.
    self.handler = LogCapturer(self)
def CheckObjectSerization(self, obj):
    """Verify obj serializes cleanly through both renderers.

    The JSON renderer must round-trip obj exactly; the data export
    renderer only needs to produce a JSON-safe encoding (it is one-way).
    """
    json_renderer_obj = json_renderer.JsonRenderer(session=self.session)
    data_export_renderer_obj = data_export.DataExportRenderer(
        session=self.session)

    # First test json encodings.
    encoded = json_renderer_obj.encode(obj)

    # Make sure it is json safe.
    json.dumps(encoded)

    # Decoding the encoding must reproduce the original object.
    self.assertEqual(json_renderer_obj.decode(encoded), obj)

    # Now check the DataExportRenderer.
    encoded = data_export_renderer_obj.encode(obj)

    # Make sure it is json safe.
    json.dumps(encoded)