def clean():
    '''Delete temporary files not under version control.'''
    basedir = dirname(__file__)

    print(cyan('delete temp files and dirs for packaging'))
    local(flo(
        'rm -rf '
        '{basedir}/.eggs/ '
        '{basedir}/fabsetup.egg-info/ '
        '{basedir}/dist '
        '{basedir}/README '
        '{basedir}/build/ '
    ))

    print(cyan('\ndelete temp files and dirs for editing'))
    local(flo(
        'rm -rf '
        '{basedir}/.cache '
        '{basedir}/.ropeproject '
    ))

    print(cyan('\ndelete bytecode compiled versions of the python src'))
    # cf. http://stackoverflow.com/a/30659970
    local(flo('find {basedir}/fabsetup '
              'fabfile* ') +
          '\( -name \*.pyc -o -name \*.pyo -o -name __pycache__ '
          '-o -name \*.so -o -name \*.o -o -name \*.c \) '
          '-prune '
          '-exec rm -rf {} + || true')

def letsencrypt():
    '''Create TLS webserver certificates which are trusted by the web PKI.

    The wildcard certificates are issued by Let's Encrypt.

    Touched files, dirs, and installed packages:
        /etc/letsencrypt/*
    '''
    repo_dir = checkup_git_repo('https://github.com/certbot/certbot.git',
                                prefix='## ', postfix='\n')
    with stopped_nginx():
        options = ' '.join([
            '--standalone',
            '--rsa-key-size 4096',
        ])
        from config import domain_groups
        for domains in domain_groups:
            domains_str = ', '.join(domains)
            print_msg(flo('\n## Create certificate for: {domains_str}\n'))
            domain_opts = ' '.join([flo(' -d {domain}')
                                    for domain in domains])
            # the 'letsencrypt-auto' command acquires root itself via 'sudo'
            run(flo('{repo_dir}/letsencrypt-auto '
                    '{options} {domain_opts} certonly'))
    list_cert_files()

def scrape_and_verify_scts(hostname, verification_tasks, ctlogs):
    logger.info(flo('# {hostname}\n'))

    res = do_handshake(hostname,
                       scts_tls=(verify_scts_by_tls in verification_tasks),
                       scts_ocsp=(verify_scts_by_ocsp in verification_tasks))
    if res.ee_cert_der:
        logger.debug('got certificate\n')
        if res.ee_cert.is_ev_cert:
            logger.info('* EV cert')
        else:
            logger.info('* no EV cert')
        if res.ee_cert.is_letsencrypt_cert:
            logger.info("* issued by Let's Encrypt\n")
        else:
            logger.info("* not issued by Let's Encrypt\n")
    if res.err:
        logger.warn(res.err)
    else:
        for verification_task in verification_tasks:
            logger.info(flo('## {verification_task.__name__}\n'))
            verifications = verification_task(res, ctlogs)
            if verifications:
                for verification in verifications:
                    show_verification(verification)
            elif res.ee_cert_der is not None:
                logger.info('no SCTs\n')

def warn_inconsistency(url, val_a, val_b):
    # suppress warning doubles (i know it's hacky)
    key = flo('{url}' + ''.join(sorted(flo('{val_a}{val_b}'))))
    if not hasattr(warn_inconsistency, 'seen'):
        warn_inconsistency.seen = {}
    if not warn_inconsistency.seen.get(key, False):
        warn_inconsistency.seen[key] = True
    else:
        return

    logger.warn(red(
        flo('inconsistent data for log {url}: {val_a} != {val_b}')))

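# The deduplication above memoizes on an attribute of the function object
# itself. A minimal self-contained sketch of the same pattern (the names
# `warn_once` and `msg` are hypothetical, not from this codebase):

def warn_once(msg):
    '''Print each distinct message only once per process.'''
    if not hasattr(warn_once, 'seen'):
        warn_once.seen = set()  # created lazily on the first call
    if msg in warn_once.seen:
        return
    warn_once.seen.add(msg)
    print(msg)

# warn_once('hello')  # prints 'hello'
# warn_once('hello')  # suppressed
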
def _parse_timestamped_entry(tdf):
    with TdfBytesParser(tdf) as parser:
        parser.read('timestamp', '!Q')  # uint64 -> 8 bytes

        entry_type = LogEntryType(
            parser.delegate('entry_type', _parse_log_entry_type))

        # parse leaf_entry
        if entry_type.is_x509_entry:
            parser.delegate('signed_entry', _parse_asn1_cert)
            parser.res['x509_entry'] = parser.res['signed_entry']
        elif entry_type.is_precert_entry:
            parser.delegate('signed_entry', _parse_pre_cert)
            parser.res['precert_entry'] = parser.res['signed_entry']
        else:
            raise Exception(flo('Unknown entry_type number: {entry_type}'))

        # TODO DEBUG ctlog_get_entries.py related (it looks like some log
        # answers are missing the ct_extensions, or there is an error in the
        # parse routines)
        try:
            parser.delegate('extensions', _parse_ct_extensions)
        except struct.error:
            pass

        return parser.result()

def create_signature_input(ee_cert, sct, *_, **__):
    # cf. https://tools.ietf.org/html/rfc6962#section-3.2

    signature_type = 0  # 0 means certificate_timestamp
    entry_type = 0      # 0: ASN.1Cert, 1: PreCert

    def reduce_func(accum_value, current):
        fmt = accum_value[0] + current[0]
        values = accum_value[1] + (current[1], )
        return fmt, values

    initializer = ('!', ())

    # fmt = '!BBQh...', values = [<sct.version>, <signature_type>, ...]
    fmt, values = reduce(reduce_func, [
        ('B', sct.version.val),
        ('B', signature_type),
        ('Q', sct.timestamp),
        ('h', entry_type),

        # signed_entry
        ('B', ee_cert.len1),
        ('B', ee_cert.len2),
        ('B', ee_cert.len3),
        (flo('{ee_cert.len}s'), ee_cert.der),

        ('h', sct.extensions_len),
    ], initializer)

    return struct.pack(fmt, *values)

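# The reduce above folds a list of (format-char, value) pairs into a single
# struct format string plus a matching value tuple. A minimal standalone
# sketch of the same fold (the values here are made up for illustration):

import struct
from functools import reduce

pairs = [('B', 1), ('B', 0), ('Q', 1519000000000), ('h', 0)]
fmt, values = reduce(
    lambda acc, cur: (acc[0] + cur[0], acc[1] + (cur[1], )),
    pairs,
    ('!', ()))  # '!' selects network (big-endian) byte order

assert fmt == '!BBQh'
packed = struct.pack(fmt, *values)  # 12 bytes: 1 + 1 + 8 + 2
assert len(packed) == 12
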
def test_pyopenssl_certificate_from_der():
    basedir = join(dirname(__file__), 'data', 'test_sct_ee_cert')
    for filename in ['ev_cert.der', 'cert_no_ev.der']:
        with open(flo('{basedir}/{filename}'), 'rb') as fh:
            cert_der = fh.read()
        got = pyopenssl_certificate_from_der(cert_der)
        assert type(got) is OpenSSL.crypto.X509

def show_verification(verification):
    '''
    Args:
        verification(ctutlz.sct.verification.SctVerificationResult)
    '''
    sct = verification.sct

    sct_log_id1, sct_log_id2 = [
        to_hex(val)
        for val in struct.unpack("!16s16s", sct.log_id.tdf)
    ]
    logger.info('```')
    logger.verbose('=' * 59)
    logger.verbose(flo('Version   : {sct.version_hex}'))
    logger.verbose(flo('LogID     : {sct_log_id1}'))
    logger.verbose(flo('            {sct_log_id2}'))
    logger.info(flo('LogID b64 : {sct.log_id_b64}'))
    logger.verbose(flo('Timestamp : {sct.timestamp} ({sct.timestamp_hex})'))
    logger.verbose(
        flo('Extensions: {sct.extensions_len} ({sct.extensions_len_hex})'))
    logger.verbose(
        flo('Algorithms: {sct.signature_alg_hash_hex}/'
            '{sct.signature_algorithm_signature} (hash/sign)'))

    show_signature_verbose(sct.signature)

    prefix = 'Sign. b64 : '
    logger.info(prefix + text_with_newlines(
        sct.signature_b64,
        line_length=16 * 3,
        newline='\n' + ' ' * len(prefix)))

    logger.verbose('--')  # visual gap between sct infos and verification result

    log = verification.log
    if log is None:
        logger.info('Log not found\n')
    else:
        logger.info(flo('Log found : {log.description}'))
        logger.verbose('Operator  : %s' % log.operated_by['name'])
        logger.info('Chrome    : %s' % log.scts_accepted_by_chrome)

    if verification.verified:
        logger.info(flo('Result    : Verified OK'))
    else:
        logger.info(flo('Result    : Verification Failure'))

    logger.info('```\n')

def show_logs(logs, heading, order=2):
    if len(logs) > 0:
        logger.info('#' * order + flo(' {heading}\n'))
        s_or_not = 's'
        if len(logs) == 1:
            s_or_not = ''
        # show log size
        logger.info('%i log%s\n' % (len(logs), s_or_not))

        # list log urls
        for log in logs:
            if logger.level < logging.INFO:
                anchor = log.url.replace('/', '')
                logger.verbose(flo('* [{log.url}](#{anchor})'))
            else:
                logger.info(flo('* {log.url}'))
        logger.info('')

        for log in logs:
            show_log(log)

def SignedCertificateTimestampList(sctlist):
    with StructContext(sctlist) as struct:
        data_dict = {
            'signed_certificate_timestamp_list_len': struct.read('!H'),
        }
        sct_list = []
        while struct.offset < struct.length:
            sct_len = struct.read('!H')
            sct_der = struct.read(flo('!{sct_len}s'))
            sct_list.append(_SctListEntry(sct_len, sct_der))
        return _SignedCertificateTimestampList(sct_list=sct_list, **data_dict)

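# The list parsed above is a TLS-style vector: a 2-byte total length
# followed by 2-byte-length-prefixed SCT blobs. A self-contained sketch of
# the same wire format using only the struct module (`parse_sct_list` and
# the payload bytes are made up for illustration):

import struct

def parse_sct_list(tdf):
    '''Parse b'<total_len><len1><sct1><len2><sct2>...' into raw SCT blobs.'''
    (total_len, ) = struct.unpack_from('!H', tdf, 0)
    offset = 2
    scts = []
    while offset < 2 + total_len:
        (sct_len, ) = struct.unpack_from('!H', tdf, offset)
        offset += 2
        scts.append(tdf[offset:offset + sct_len])
        offset += sct_len
    return scts

# two fake 3-byte "SCTs": total length is 2+3 + 2+3 = 10
blob = struct.pack('!H', 10) + struct.pack('!H3s', 3, b'abc') + \
       struct.pack('!H3s', 3, b'xyz')
assert parse_sct_list(blob) == [b'abc', b'xyz']
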
def pypi():
    '''Build package and upload to pypi.'''
    if not query_yes_no('version updated in `fabsetup/_version.py`?'):
        print('abort')
    else:
        print(cyan('\n## clean-up\n'))
        execute(clean)

        basedir = dirname(__file__)

        # latest_pythons = _determine_latest_pythons()
        # # e.g. highest_minor: '3.6'
        # highest_minor = _highest_minor(latest_pythons)
        # python = flo('python{highest_minor}')
        python = 'python'

        print(cyan('\n## build package'))
        local(flo('cd {basedir} && {python} setup.py sdist'))

        print(cyan('\n## upload package'))
        local(flo('cd {basedir} && {python} -m twine upload dist/*'))

def test_is_ev_cert():
    basedir = join(dirname(__file__), 'data', 'test_sct_ee_cert')
    test_data = [
        ('ev_cert.der', True),
        ('cert_no_ev.der', False),
    ]
    for filename, expected in test_data:
        with open(flo('{basedir}/{filename}'), 'rb') as fh:
            cert_der = fh.read()
        ee_cert = EndEntityCert(cert_der)
        assert ee_cert.is_ev_cert is expected

def TlsExtension18(extension_18_tdf):
    with StructContext(extension_18_tdf) as struct:
        data_dict = {
            'tls_extension_type': struct.read('!H'),
            'tls_extension_len': struct.read('!H'),
            'signed_certificate_timestamp_list_len': struct.read('!H'),
        }
        sct_list = []
        while struct.offset < struct.length:
            sct_len = struct.read('!H')
            sct_der = struct.read(flo('!{sct_len}s'))
            sct_list.append(_SctListEntry(sct_len, sct_der))
        return _TlsExtension18(sct_list=sct_list, **data_dict)

def _parse_asn1_cert(tdf):
    with TdfBytesParser(tdf) as parser:
        parser.read('len1', '!B')
        parser.read('len2', '!B')
        parser.read('len3', '!B')
        # the three length bytes form a 24 bit big-endian integer
        der_len = struct.unpack('!I', struct.pack('!4B',
                                                  0,
                                                  parser.res['len1'],
                                                  parser.res['len2'],
                                                  parser.res['len3']))[0]
        parser.res['der_len'] = der_len
        parser.read('der', flo('!{der_len}s'))
        return parser.result()

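# Worked example of the 24 bit length decoding above: a DER blob of
# 70000 bytes has the length bytes 0x01 0x11 0x70, since
# 0x01 * 65536 + 0x11 * 256 + 0x70 = 65536 + 4352 + 112 = 70000.

import struct

len1, len2, len3 = 0x01, 0x11, 0x70
der_len = struct.unpack('!I', struct.pack('!4B', 0, len1, len2, len3))[0]
assert der_len == 70000
assert der_len == (len1 << 16) | (len2 << 8) | len3  # same computation
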
def show_log(log, order=3):
    logger.verbose('#' * order + flo(' {log.url}\n'))

    logdict = log._asdict()
    for key, value in logdict.items():
        if key == 'id_b64_non_calculated' and value == log.id_b64:
            value = None  # don't log this value

        if key == 'operated_by':
            value = ', '.join(value)

        # avoid markdown syntax interpretation and improve readability
        key = key.replace('_', ' ')

        if value is not None:
            logger.verbose(flo('* __{key}__: `{value}`'))

    logger.verbose(flo('* __scts accepted by chrome__: '
                       '{log.scts_accepted_by_chrome}'))
    if log.key is not None:
        logger.verbose(flo('* __id b64__: `{log.id_b64}`'))
        logger.verbose(flo('* __pubkey__:\n```\n{log.pubkey}\n```'))
    logger.verbose('')

def _parse_signed_certificate_timestamp(tdf):
    with TdfBytesParser(tdf) as parser:
        parser.delegate('version', _parse_version)
        parser.delegate('id', _parse_log_id)
        parser.read('timestamp', '!Q')
        parser.delegate('ct_extensions', _parse_ct_extensions)

        # digitally-signed struct
        parser.read('signature_alg_hash', '!B')
        parser.read('signature_alg_sign', '!B')
        signature_len = parser.read('signature_len', '!H')
        parser.read('signature', flo('!{signature_len}s'))

        return parser.result()

def serverinfo_cli_parse_cb(ssl, ext_type, _in, inlen, al, arg):
    if ext_type == 18:

        def reduce_func(accum_value, current):
            fmt = accum_value[0] + current[0]
            values = accum_value[1] + (current[1], )
            return fmt, values

        initializer = ('!', ())
        fmt, values = reduce(reduce_func, [
            ('H', ext_type),
            ('H', inlen),
            (flo('{inlen}s'), bytes(ffi.buffer(_in, inlen))),
        ], initializer)
        ctx.tls_ext_18_tdf = struct.pack(fmt, *values)
    return 1  # True

def _parse_log_entry(tdf):
    with TdfBytesParser(tdf) as parser:
        entry_type = LogEntryType(
            parser.delegate('entry_type', _parse_log_entry_type))

        # parse entry
        if entry_type.is_x509_entry:
            parser.delegate('entry', _parse_x509_chain_entry)
            parser.res['x509_entry'] = parser.res['entry']
        elif entry_type.is_precert_entry:
            parser.delegate('entry', _parse_precert_chain_entry)
            parser.res['precert_entry'] = parser.res['entry']
        else:
            raise Exception(flo('Unknown entry_type: {entry_type}'))

        return parser.result()

def show_signature_verbose(signature):
    '''Print out signature as hex string to logger.verbose.

    Args:
        signature(bytes)
    '''
    sig_offset = 0
    while sig_offset < len(signature):
        if len(signature) - sig_offset > 16:
            bytes_to_read = 16
        else:
            bytes_to_read = len(signature) - sig_offset
        sig_bytes = struct.unpack_from(flo('!{bytes_to_read}s'),
                                       signature,
                                       sig_offset)[0]
        if sig_offset == 0:
            logger.verbose('Signature : %s' % to_hex(sig_bytes))
        else:
            logger.verbose('            %s' % to_hex(sig_bytes))
        sig_offset = sig_offset + bytes_to_read

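# The loop above is a 16-bytes-per-line hex dump. A compact standalone
# sketch of the same chunking, without the struct round trip (binascii's
# hexlify stands in for the to_hex() helper of this codebase):

from binascii import hexlify

def hexdump_lines(data, width=16):
    '''Yield hex strings of at most `width` bytes of `data` per line.'''
    for offset in range(0, len(data), width):
        yield hexlify(data[offset:offset + width]).decode('ascii')

assert list(hexdump_lines(b'\x00' * 20)) == ['00' * 16, '00' * 4]
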
def create_signature_input_precert(ee_cert, sct, issuer_cert):
    # cf. https://tools.ietf.org/html/rfc6962#section-3.2

    signature_type = 0  # 0 means certificate_timestamp
    entry_type = 1      # 0: ASN.1Cert, 1: PreCert

    tbscert = ee_cert.tbscert.without_ct_extensions

    def reduce_func(accum_value, current):
        fmt = accum_value[0] + current[0]
        values = accum_value[1] + (current[1], )
        return fmt, values

    initializer = ('!', ())

    # fmt = '!BBQh...', values = [<sct.version>, <signature_type>, ...]
    fmt, values = reduce(reduce_func, [
        ('B', sct.version.val),
        ('B', signature_type),
        ('Q', sct.timestamp),
        ('h', entry_type),

        # signed_entry

        # issuer_key_hash[32]
        ('32s', issuer_cert.pubkey_hash),

        # tbs_certificate (rfc6962, page 12)
        # * DER encoded TBSCertificate of the ee_cert
        # * without SCT extension
        ('B', tbscert.len1),
        ('B', tbscert.len2),
        ('B', tbscert.len3),
        (flo('{tbscert.len}s'), tbscert.der),

        ('h', sct.extensions_len),
    ], initializer)

    return struct.pack(fmt, *values)

def test_verify_signature():
    test_data = [
        Item(
            domain='',
            expected_verify=True,
            signature_input_bin=from_file('signature_input_valid.bin'),
            signature_der=from_file('signature.der'),
            pubkey_pem=from_file('pubkey.pem'),
        ),
        Item(
            domain='',
            expected_verify=False,
            signature_input_bin=b'some invalid signature input',
            signature_der=from_file('signature.der'),
            pubkey_pem=from_file('pubkey.pem'),
        ),
        Item(
            domain='google.com',
            expected_verify=True,
            signature_input_bin=from_file('google.com/signature_input.bin'),
            signature_der=from_file('google.com/signature.der'),
            pubkey_pem=from_file('google.com/pubkey.pem'),
        ),
        # Item(
        #     domain='pirelli.com',
        #     expected_verify=True,
        #     signature_input_bin=from_file('pirelli.com/signature_input.bin'),
        #     signature_der=from_file('pirelli.com/signature.der'),
        #     pubkey_pem=from_file('pirelli.com/pubkey.pem'),
        # ),
    ]
    for item in test_data:
        assert verify_signature(item.signature_input_bin,
                                item.signature_der,
                                item.pubkey_pem) is item.expected_verify, \
            flo('verify_signature() for {item.domain} '
                'must return {item.expected_verify}')

def from_file(filename):
    basedir = join(dirname(__file__), 'data', 'test_sct_verify_signature')
    with open(flo('{basedir}/{filename}'), 'rb') as fh:
        data = fh.read()
    return data

def ctloglist(print_json=None):
    '''Gather ct-log lists and print the merged log list.

    Args:
        print_json(boolean): If True, print merged log list as json data.
                             Else print as markdown.
    '''
    if not print_json:
        today = datetime.date.today()
        now = datetime.datetime.now()

        logger.info('# Known Certificate Transparency (CT) Logs\n')
        logger.verbose('Created with [ctloglist]'
                       '(https://github.com/theno/ctutlz#ctloglist)\n')
        logger.verbose('Merged log lists:')
        logger.verbose('* webpage [known logs]'
                       '(https://www.certificate-transparency.org/known-logs)')
        logger.verbose('* [log_list.json]'
                       '(https://www.gstatic.com/ct/log_list/log_list.json)')
        logger.verbose('* [all_logs_list.json]('
                       'https://www.gstatic.com/ct/log_list/all_logs_list.json)'
                       '\n')
        logger.info(flo('Version (Date): {today}\n'))
        logger.verbose(flo('Datetime: {now}\n'))
        logger.info('')  # formatting: insert empty line

    # from webpage

    webpage_dict = logs_dict_from_webpage()

    all_from_webpage = Logs([
        log_dict
        for log_list in [webpage_dict[key] for key in webpage_dict]
        for log_dict in log_list
    ])

    # dict.pop(key, default) returns the value and drops the key, replacing
    # the repeated get() plus try/pop/except-KeyError blocks
    included_from_webpage = Logs(webpage_dict.pop('included_in_chrome', []))
    frozen_from_webpage = Logs(webpage_dict.pop('frozen_logs', []))
    pending_from_webpage = Logs(
        webpage_dict.pop('pending_inclusion_in_chrome', []))
    disqualified_from_webpage = Logs(
        webpage_dict.pop('disqualified_from_chrome', []))
    rejected_from_webpage = Logs(webpage_dict.pop('rejected_by_chrome', []))
    distrusted_from_webpage = Logs(
        webpage_dict.pop('completely_distrusted_by_chrome', []))
    other_from_webpage = Logs(webpage_dict.pop('other_logs', []))
    special_from_webpage = Logs(webpage_dict.pop('special_purpose_logs', []))

    unknown_log_titles = list(webpage_dict.keys())
    if unknown_log_titles:
        logger.error(red(flo(
            'unknown log titles (i.e. log states): {unknown_log_titles}')))

    # log_list.json: chrome ct policy compliant logs

    log_list_dict = download_log_list(URL_LOG_LIST)
    set_operator_names(log_list_dict)
    for log_dict in log_list_dict['logs']:
        if 'disqualified_at' in log_dict.keys():
            log_dict['chrome_state'] = ChromeStates.DISQUALIFIED
        elif 'final_sth' in log_dict.keys():
            log_dict['chrome_state'] = ChromeStates.FROZEN
        else:
            log_dict['chrome_state'] = ChromeStates.INCLUDED
    log_list_logs = Logs(log_list_dict['logs'])

    # all_logs_list.json

    all_dict = download_log_list(URL_ALL_LOGS)
    set_operator_names(all_dict)
    all_logs = Logs(all_dict['logs'])

    # merge lists and show the result

    log_lists = merge_log_lists(**locals())
    if print_json:
        data = {
            'operators': all_dict['operators'],
            'logs': list_from_lists(log_lists),
        }
        unset_operator_names(data)
        json_str = json.dumps(data, indent=4, sort_keys=True)
        print(json_str)
    else:
        for item in log_lists:
            show_logs(item['logs'], item['heading'])

def merge_log_lists(included_from_webpage,
                    frozen_from_webpage,
                    pending_from_webpage,
                    disqualified_from_webpage,
                    rejected_from_webpage,
                    distrusted_from_webpage,
                    other_from_webpage,
                    special_from_webpage,
                    all_from_webpage,  # known-logs.html
                    log_list_logs,     # log_list.json
                    all_logs,          # all_logs_list.json
                    **_):
    '''Merge log lists, warn on log list errors and return merged logs.'''

    # log lists

    # log_list.json contains the logs which are chrome ct policy compliant
    #
    # 'll_...' means: log is listed in log_list.json
    # 'nn_...' means: log is not listed in log_list.json

    ll_included = []
    ll_frozen = []
    ll_pending = []
    ll_disqualified = []
    ll_rejected = []
    ll_distrusted = []
    ll_other = []
    ll_special = []

    nn_included = []
    nn_frozen = []
    nn_pending = []
    nn_disqualified = []
    nn_rejected = []
    nn_distrusted = []
    nn_other = []
    nn_special = []

    # merge log_list.json with log lists from webpage

    ll_rest = log_list_logs
    ll_included, ll_rest, nn_included = \
        merge_log_list_r(ll_rest, included_from_webpage)
    ll_frozen, ll_rest, nn_frozen = \
        merge_log_list_r(ll_rest, frozen_from_webpage)
    ll_pending, ll_rest, nn_pending = \
        merge_log_list_r(ll_rest, pending_from_webpage)
    ll_disqualified, ll_rest, nn_disqualified = \
        merge_log_list_r(ll_rest, disqualified_from_webpage)
    ll_rejected, ll_rest, nn_rejected = \
        merge_log_list_r(ll_rest, rejected_from_webpage)
    ll_distrusted, ll_rest, nn_distrusted = \
        merge_log_list_r(ll_rest, distrusted_from_webpage)
    ll_other, ll_rest, nn_other = \
        merge_log_list_r(ll_rest, other_from_webpage)
    ll_special, ll_rest, nn_special = \
        merge_log_list_r(ll_rest, special_from_webpage)

    # `ll_rest` now contains all logs from log_list.json which are not
    # listed on the webpage (this list should be empty, else the webpage is
    # missing entries)

    # merge log lists with all_logs.json

    all_rest = all_logs  # logs listed in all_logs.json

    ll_included, all_rest = merge_enrich_a_with_b(ll_included, all_rest)
    ll_frozen, all_rest = merge_enrich_a_with_b(ll_frozen, all_rest)
    ll_pending, all_rest = merge_enrich_a_with_b(ll_pending, all_rest)
    ll_disqualified, all_rest = merge_enrich_a_with_b(ll_disqualified,
                                                      all_rest)
    ll_rejected, all_rest = merge_enrich_a_with_b(ll_rejected, all_rest)
    ll_distrusted, all_rest = merge_enrich_a_with_b(ll_distrusted, all_rest)
    ll_other, all_rest = merge_enrich_a_with_b(ll_other, all_rest)
    ll_special, all_rest = merge_enrich_a_with_b(ll_special, all_rest)

    nn_included, all_rest = merge_overwrite_a_with_b(nn_included, all_rest)
    nn_frozen, all_rest = merge_overwrite_a_with_b(nn_frozen, all_rest)
    nn_pending, all_rest = merge_overwrite_a_with_b(nn_pending, all_rest)
    nn_disqualified, all_rest = merge_overwrite_a_with_b(nn_disqualified,
                                                         all_rest)
    nn_rejected, all_rest = merge_overwrite_a_with_b(nn_rejected, all_rest)
    nn_distrusted, all_rest = merge_overwrite_a_with_b(nn_distrusted,
                                                       all_rest)
    nn_other, all_rest = merge_overwrite_a_with_b(nn_other, all_rest)
    nn_special, all_rest = merge_overwrite_a_with_b(nn_special, all_rest)

    # currently only special purpose logs are listed on webpage
    # known-logs.html
    #
    # # warn for missing logs on webpage
    #
    # for log in ll_rest:
    #     logger.warn(red(flo(
    #         'log in log_list.json not listed on webpage: {log.url}')))
    #
    # for log in all_rest:
    #     logger.warn(red(flo(
    #         'log in all_logs.json not listed on webpage: {log.url}')))

    # warn for wrongly listed logs

    for log in ll_pending:
        logger.warn(red(flo(
            'log pending for inclusion listed in log_list.json: {log.url}')))
    for log in ll_rejected:
        logger.warn(red(flo(
            'rejected log listed in log_list.json: {log.url}')))
    for log in ll_distrusted:
        logger.warn(red(flo(
            'distrusted log listed in log_list.json: {log.url}')))
    for log in ll_other:
        logger.warn(red(flo(
            'other purpose log listed in log_list.json: {log.url}')))

    for log in nn_included:
        logger.warn(red(flo(
            'chrome included log not listed in log_list.json: {log.url}')))
    for log in nn_frozen:
        logger.warn(red(flo(
            'chrome frozen log not listed in log_list.json: {log.url}')))

    ll_rest, all_rest = merge_enrich_a_with_b(ll_rest, all_rest)
    rest = ll_rest + all_rest

    # warn for logs only listed on webpage
    _, webpage_rest, _ = merge_log_list_r(all_from_webpage, log_list_logs)
    _, webpage_rest, _ = merge_log_list_r(webpage_rest, all_logs)
    for log in webpage_rest:
        logger.warn(red(flo(
            'log not listed in log_list.json nor all_logs.json: {log.url}')))

    return [
        {'heading': 'included logs (log_list.json, webpage)',
         'logs': ll_included},
        {'heading': 'frozen logs (log_list.json, webpage)',
         'logs': ll_frozen},
        {'heading': 'pending logs (log_list.json, webpage)',
         'logs': ll_pending},
        {'heading': 'disqualified logs (log_list.json, webpage)',
         'logs': ll_disqualified},
        {'heading': 'rejected logs (log_list.json, webpage)',
         'logs': ll_rejected},
        {'heading': 'distrusted logs (log_list.json, webpage)',
         'logs': ll_distrusted},

        {'heading': 'included logs NOT IN log_list.json '
                    '(webpage, all_logs.json)',
         'logs': nn_included},
        {'heading': 'frozen logs NOT IN log_list.json '
                    '(webpage, all_logs.json)',
         'logs': nn_frozen},
        {'heading': 'pending logs (webpage, all_logs.json)',
         'logs': nn_pending},
        {'heading': 'disqualified logs (webpage, all_logs.json)',
         'logs': nn_disqualified},
        {'heading': 'rejected logs (webpage, all_logs.json)',
         'logs': nn_rejected},
        {'heading': 'distrusted logs (webpage, all_logs.json)',
         'logs': nn_distrusted},
        {'heading': 'other logs (webpage, all_logs.json)',
         'logs': nn_other},
        {'heading': 'special purpose logs (webpage, all_logs.json)',
         'logs': nn_special},

        {'heading': 'UNLISTED ON WEBPAGE (log_list.json or all_logs.json)',
         'logs': rest},
    ]

LogEntryType = namedtuple(
    typename='LogEntryType',
    field_names='arg',
    lazy_vals={
        '_parse_func': lambda _: _parse_log_entry_type,

        'val': lambda self: self._parse['val'],

        'is_x509_entry': lambda self: self.val == 0,
        'is_precert_entry': lambda self: self.val == 1,

        '__str__': lambda self: lambda: (
            'x509_entry' if self.is_x509_entry
            else 'precert_entry' if self.is_precert_entry
            else flo('<unknown log entry type {self.tdf}>')),
    })
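
# LogEntryType is built with the namedtuple() of the utlz library, whose
# lazy_vals are computed from `self` on attribute access rather than at
# construction time. A minimal sketch of that mechanism, assuming utlz is
# installed and its lazy_vals work as used above (`Point` and `norm` are
# made-up illustration names):

from utlz import namedtuple

Point = namedtuple(
    typename='Point',
    field_names='x, y',
    lazy_vals={
        # evaluated lazily, with access to the other fields via `self`
        'norm': lambda self: (self.x ** 2 + self.y ** 2) ** 0.5,
    })

assert Point(x=3, y=4).norm == 5.0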