def write(self, records, ptr):
    """Write a batch of feed records to the database in one transaction.

    records -- list of raw record dicts (with 'code', 'file', 'offset' keys
               among others); each is parsed via feed.parse_record.
    ptr     -- feed read pointer: (file id, offset in file) tuple.

    Returns True when every record was handled and the transaction
    committed; False on any fatal error. The stored pointer is only
    advanced on success.
    """
    if not self.begin_transaction():
        return False
    if not self.check_streammap_table():
        utils.log_error("Couldn't create stream mapping table")
        return False
    for rec in records:
        parsed = feed.parse_record(rec)
        if self.process_event(parsed):
            continue
        utils.log_error(
            "Couldn't process event {code:02x} at ({file:4d}, {offset:08x})"
            .format(code=rec['code'], file=rec['file'], offset=rec['offset']))
        # A record with no target table is treated as corrupt and skipped;
        # any other processing failure aborts the whole batch.
        if parsed.table is not None:
            return False
        utils.log_error("Event record corrupted, ignored")
    # Persist the read pointer for this output under its configured name.
    self.execute_upsert(
        "pointers",
        {"pointer": self.ptr_name},
        {"pointer": self.ptr_name},
        {"file": ptr[0], "pos": ptr[1]},
    )
    self.pointer = ptr
    return self.commit_transaction()
def write(self, records, ptr):
    """Write a batch of feed records using a direct SQL pointer upsert.

    records -- list of raw record dicts (with 'code', 'file', 'offset' keys
               among others); each is parsed via feed.parse_record.
    ptr     -- feed read pointer: (file id, offset in file) tuple.

    Returns True when every record was handled and the transaction
    committed; False on any fatal error. The read pointer row is
    upserted (PostgreSQL ON CONFLICT) only on the success path.
    """
    if not self.begin_transaction():
        return False
    if not self.check_streammap_table():
        utils.log_error("Couldn't create stream mapping table")
        return False
    for rec in records:
        parsed = feed.parse_record(rec)
        if self.process_event(parsed):
            continue
        utils.log_error(
            "Couldn't process event {code:02x} at ({file:4d}, {offset:08x})"
            .format(code=rec['code'], file=rec['file'], offset=rec['offset']))
        # Records without a target table are considered corrupt and are
        # skipped; every other failure aborts the whole batch.
        if parsed.table is not None:
            return False
        utils.log_error("Event record corrupted, ignored")
    # Parameterized upsert of the read pointer (safe against injection).
    self.execute_sql(
        "INSERT INTO pointers (pointer, file, pos) VALUES (%s,%s,%s) ON CONFLICT (pointer) DO UPDATE SET file=%s,pos=%s;",
        (self.ptr_name, ptr[0], ptr[1], ptr[0], ptr[1]))
    self.pointer = ptr
    return self.commit_transaction()
def write(self, records, ptr):
    """Append a batch of feed records to the dump file and save the pointer.

    Output format is selected by self.config['type']:
      "dump"     -- human-readable event/field names with lengths
      "dump-hex" -- compact prefixed lines (T: timestamp, R: record, F: field)

    records -- list of raw record dicts ('code', 'length', ...), parsed
               field-by-field via feed.parse_record(record, False).
    ptr     -- feed read pointer: (file id, offset in file) tuple.

    Returns the result of utils.write_file_ptr (persisting the pointer).
    """
    # BUG FIX: the original ended with `f.close` (no parentheses) — an
    # attribute access that never closed the file, leaking the handle and
    # delaying flushes. A with-block guarantees the file is closed even
    # when a write raises.
    with open(self.config['out'], "a") as f:
        for record in records:
            fields = feed.parse_record(record, False)
            # One timestamp line per record.
            record_str = str(datetime.datetime.now()) + "\n"
            if self.config['type'] == "dump-hex":
                record_str = "T: " + record_str
            f.write(record_str)
            record_str = ""
            if self.config['type'] == "dump":
                record_str = event_name(record["code"]) + ", length: " + str(
                    record["length"]) + "\n"
            elif self.config['type'] == "dump-hex":
                record_str = "R: {code:02x}: {length}\n".format(
                    code=record["code"], length=record["length"])
            f.write(record_str)
            for field in fields:
                value = field["value"]
                # Binary field values are rendered as hex strings.
                if multichain.is_binary_field(field["code"]):
                    value = utils.bytes_to_hex(value)
                field_str = ""
                if self.config['type'] == "dump":
                    field_str = " " + field_name(
                        field["code"]) + ", length: " + str(
                            field["length"]) + ": " + str(value) + "\n"
                elif self.config['type'] == "dump-hex":
                    field_str = "F: {code:02x}: ".format(
                        code=field["code"]) + str(value) + "\n"
                f.write(field_str)
    self.pointer = ptr
    return utils.write_file_ptr(self.config, ptr)
def write(self, records, ptr):
    """Process a batch of feed records and persist the read pointer.

    records -- list of record objects:
        {
          'code'   : <event code>   # see multichain.py for the event list
          'length' : record length
          'data'   : record data
        }
    Use feed.parse_record(record) to obtain a parsed event object; its
    fields depend on the event type (see the Event* classes in
    multichain.py). feed.parse_record(record, False) instead returns a
    flat list of fields:
        {
          'code'     : <field code>  # see multichain.py for field types
          'length'   : field length
          'value'    : bytes for binary fields, string otherwise
          'intvalue' : integer value for integer/timestamp fields
        }

    ptr -- feed read pointer: (file id, offset in file) tuple.

    Returns False as soon as any event fails to process; otherwise
    stores the pointer and returns the result of utils.write_file_ptr.
    """
    # Process records here
    for rec in records:
        if not self.process_event(feed.parse_record(rec)):
            utils.log_error(
                "Couldn't process event {code:02x} at ({file:4d}, {offset:08x})"
                .format(code=rec['code'], file=rec['file'],
                        offset=rec['offset']))
            return False
    self.pointer = ptr
    return utils.write_file_ptr(self.config, ptr)