def process_entries(entry_queue, output_queue, match_callback):
    """Worker loop: parse queued log entries and report matches.

    Pulls (count, serialized_entry) tuples off entry_queue until the
    _STOP_WORKER sentinel is seen. Each entry is decoded, its certificate
    parsed, and match_callback is invoked on successfully parsed
    certificates; results and errors are reported on output_queue as
    QueueMessage objects.

    Args:
        entry_queue: queue of (count, entry) tuples, where entry is either
            a serialized client_pb2.EntryResponse or _STOP_WORKER.
        output_queue: queue receiving QueueMessage progress/error/match
            reports.
        match_callback: callable(certificate, entry_type, extra_data,
            count) returning a truthy match result when the entry matches.
    """
    stopped = False
    total_processed = 0
    while not stopped:
        count, entry = entry_queue.get()
        if entry == _STOP_WORKER:
            stopped = True
            # Each worker signals when they've picked up their
            # "STOP_WORKER" message.
            output_queue.put(QueueMessage(
                _WORKER_STOPPED,
                certificates_scanned=total_processed))
        else:
            entry_response = client_pb2.EntryResponse()
            entry_response.ParseFromString(entry)
            parsed_entry = entry_decoder.decode_entry(entry_response)
            ts_entry = parsed_entry.merkle_leaf.timestamped_entry
            total_processed += 1
            # c stays None if every parse attempt below fails, which
            # suppresses the match_callback invocation.
            c = None
            if ts_entry.entry_type == client_pb2.X509_ENTRY:
                der_cert = ts_entry.asn1_cert
            else:
                # The original, signed precertificate.
                der_cert = (
                    parsed_entry.extra_data.precert_chain_entry.pre_certificate)
            try:
                # First attempt strict DER parsing; fall back to lenient
                # parsing so almost-valid certificates can still be matched.
                c = cert.Certificate(der_cert)
            except error.Error as e:
                try:
                    c = cert.Certificate(der_cert, strict_der=False)
                except error.Error as e:
                    # Even lenient parsing failed: report and move on.
                    output_queue.put(QueueMessage(
                        _ERROR_PARSING_ENTRY,
                        "Error parsing entry %d:\n%s" % (count, e)))
                else:
                    # Lenient parsing succeeded; still flag the strict
                    # failure, including the parsed certificate.
                    output_queue.put(QueueMessage(
                        _ERROR_PARSING_ENTRY,
                        "Entry %d failed strict parsing:\n%s" % (count, c)))
            except Exception as e:
                # Unexpected (non-error.Error) failure: log with a
                # traceback for debugging, then report it like other
                # parse errors so the scan keeps going.
                print "Unknown parsing failure for entry %d:\n%s" % (
                    count, e)
                traceback.print_exc()
                output_queue.put(QueueMessage(
                    _ERROR_PARSING_ENTRY,
                    "Entry %d failed parsing with an unknown error:\n%s" % (
                        count, e)))
            if c:
                match_result = match_callback(
                    c, ts_entry.entry_type, parsed_entry.extra_data, count)
                if match_result:
                    output_queue.put(QueueMessage(
                        _ENTRY_MATCHING, "Entry %d:\n%s" % (count, c),
                        matcher_output=match_result))
            # Emit a progress report every _BATCH_SIZE processed entries.
            if not total_processed % _BATCH_SIZE:
                output_queue.put(QueueMessage(
                    _PROGRESS_REPORT,
                    "Scanned %d entries" % total_processed,
                    certificates_scanned=_BATCH_SIZE))
def scan_entries(self, start, end):
    """Retrieve log entries.

    Args:
        start: index of the first entry to retrieve.
        end: index of the last entry to retrieve.

    Yields:
        client_pb2.EntryResponse protos.

    Raises:
        KeyError: an entry with a sequence number in the range does not
            exist.
    """
    with self.__mgr.get_connection() as conn:
        cursor = conn.cursor()
        # Track the id we expect next so gaps in the sequence are
        # detected as soon as they are encountered.
        expected_id = start
        rows = cursor.execute(
            "SELECT id, entry FROM entries WHERE id "
            "BETWEEN ? and ? ORDER BY id ASC", (start, end))
        for row in rows:
            if row["id"] != expected_id:
                raise database.KeyError("No such entry: %d" % expected_id)
            response = client_pb2.EntryResponse()
            response.ParseFromString(str(row["entry"]))
            yield response
            expected_id += 1
        # The query may simply return fewer rows than requested; verify
        # we actually reached the end of the range.
        if expected_id != end + 1:
            raise database.KeyError("No such entry: %d" % expected_id)
def make_entries(start, end):
    """Build (index, EntryResponse) pairs for indices start..end inclusive."""
    result = []
    index = start
    while index <= end:
        response = client_pb2.EntryResponse()
        response.leaf_input = "leaf_input-%d" % index
        response.extra_data = "extra_data-%d" % index
        result.append((index, response))
        index += 1
    return result
def _parse_entry(json_entry):
    """Convert a json array element to an EntryResponse."""
    entry_response = client_pb2.EntryResponse()
    # Both fields are base64-encoded; a missing key, bad padding, or a
    # value of the wrong type all invalidate the entry.
    for field in ("leaf_input", "extra_data"):
        try:
            setattr(entry_response, field,
                    base64.b64decode(json_entry[field]))
        except (TypeError, ValueError, KeyError) as e:
            raise InvalidResponseError(
                "Invalid entry: %s\n%s" % (json_entry, e))
    return entry_response
def _decode_entry(serialized_entry):
    """Deserialize an EntryResponse and decode it into a ParsedEntry."""
    response = client_pb2.EntryResponse()
    response.ParseFromString(serialized_entry)
    decoded = client_pb2.ParsedEntry()
    # Decode the Merkle leaf first, then copy its entry type onto
    # extra_data before decoding extra_data itself — this ordering
    # matches the original and is preserved deliberately.
    tls_message.decode(response.leaf_input, decoded.merkle_leaf)
    decoded.extra_data.entry_type = (
        decoded.merkle_leaf.timestamped_entry.entry_type)
    tls_message.decode(response.extra_data, decoded.extra_data)
    return decoded
def test_update_entries_fails_not_enough_entries(self):
    """_update_entries must fail when the log returns too few entries."""
    client = FakeLogClient(self._NEW_STH)
    # Stub get_entries to hand back a single entry, fewer than the new
    # STH requires.
    client.get_entries = mock.MagicMock()
    only_entry = client_pb2.EntryResponse()
    only_entry.leaf_input = "leaf"
    only_entry.extra_data = "extra"
    client.get_entries.return_value = iter([only_entry])

    monitor = self.create_monitor(client)
    monitor._compute_projected_sth = self._NEW_STH_compute_projected
    # Get the new STH first.
    self.assertTrue(monitor._update_sth())
    self.assertFalse(monitor._update_entries())
def produce(self):
    """Feed synthetic entries to self.consumer in batches.

    Twisted deferredGenerator-style method: each batch is handed to
    self.consumer.consume(), and the generator yields a waitForDeferred
    wrapper so the caller's trampoline resumes it when the consumer's
    Deferred fires.
    """
    # Optionally simulate a producer-side failure.
    if self.throw:
        raise self.throw
    for i in range(self._start, self._end, self.batch_size):
        entries = []
        # Build one batch; min() clamps the final, possibly short, batch.
        for j in range(i, min(i + self.batch_size, self._end)):
            entry = client_pb2.EntryResponse()
            entry.leaf_input = "leaf_input-%d" % j
            entry.extra_data = "extra_data-%d" % j
            entries.append(entry)
        d = self.consumer.consume(entries)
        wfd = defer.waitForDeferred(d)
        yield wfd
        # Propagate any failure raised by the consumer.
        wfd.getResult()
        if self.stop:
            break
    if not self.stop:
        # NOTE(review): the loops above generate self._end - self._start
        # entries (range excludes self._end), yet the callback reports
        # that count + 1 — presumably callers treat the range as
        # inclusive; confirm against the consumer-side accounting.
        self.done.callback(self._end - self._start + 1)
def make_entry(leaf_index):
    """Return a synthetic EntryResponse whose fields encode leaf_index."""
    response = client_pb2.EntryResponse()
    response.leaf_input = "leaf_input-%d" % leaf_index
    response.extra_data = "extra_data-%d" % leaf_index
    return response
def get_entries(self, start, end):
    """Yield synthetic EntryResponse protos for indices in [start, end].

    The range is clamped to the fake log's current tree size, so fewer
    entries than requested may be produced.
    """
    limit = min(self.sth.tree_size, end + 1)
    index = start
    while index < limit:
        response = client_pb2.EntryResponse()
        response.leaf_input = "leaf_input-%d" % index
        response.extra_data = "extra_data-%d" % index
        yield response
        index += 1