Example #1
def main():
    # Assumes 'dfa' and 'Worker' are module-level globals, as set up in
    # Example #3. The decoder DFA is expected to have exactly one
    # accepting state: the initial state.
    accepting_states = [state for state in dfa.states if state.is_accepting]
    assert accepting_states == [dfa.initial_state]

    assert not dfa.initial_state.any_byte

    num_suffixes = dfa_traversal.GetNumSuffixes(dfa.initial_state)
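    # num_suffixes appears to map each reachable state to the number of
    # byte suffixes leading from it to an accepting state.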
    # We can't just write 'num_suffixes[dfa.initial_state]' because the
    # initial state is accepting, so the empty suffix would be counted too.
    total_instructions = sum(
        num_suffixes[t.to_state]
        for t in dfa.initial_state.forward_transitions.values())
    print(total_instructions, 'instructions total')

    tasks = dfa_traversal.CreateTraversalTasks(dfa.states, dfa.initial_state)
    print(len(tasks), 'tasks')

    pool = multiprocessing.Pool()

    results = pool.imap(Worker, tasks)

    total = 0
    for prefix, count in results:
        print(', '.join(map(hex, prefix)))
        total += count

    print(total, 'instructions were processed')
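
Every example passes a module-level Worker callable to the pool, but none of the excerpts define it. Below is a minimal sketch of a Worker compatible with Example #1's result shape; the (prefix, state) task layout and the CountSuffixes helper are hypothetical, reusing only the state attributes (is_accepting, forward_transitions, to_state) that the examples themselves rely on:

def Worker(task):
    # Hypothetical task layout: the byte prefix to report plus the DFA
    # state that the prefix leads to. The real tasks come from
    # dfa_traversal.CreateTraversalTasks and may be shaped differently.
    prefix, state = task
    return prefix, CountSuffixes(state, {})

def CountSuffixes(state, cache):
    # Counts the byte suffixes that end in an accepting state, mirroring
    # what dfa_traversal.GetNumSuffixes precomputes. Assumes the decoder
    # DFA is acyclic; the cache keeps shared states from being recounted.
    if state in cache:
        return cache[state]
    count = 1 if state.is_accepting else 0
    for transition in state.forward_transitions.values():
        count += CountSuffixes(transition.to_state, cache)
    cache[state] = count
    return count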
Example #2
def main():
    assert dfa.initial_state.is_accepting
    assert not dfa.initial_state.any_byte

    print(len(dfa.states), 'states')

    num_suffixes = dfa_traversal.GetNumSuffixes(dfa.initial_state)

    # We can't just write 'num_suffixes[dfa.initial_state]' because the
    # initial state is accepting, so the empty suffix would be counted too.
    total_instructions = sum(
        num_suffixes[t.to_state]
        for t in dfa.initial_state.forward_transitions.values())
    print(total_instructions, 'regular instructions total')

    tasks = dfa_traversal.CreateTraversalTasks(dfa.states, dfa.initial_state)
    print(len(tasks), 'tasks')

    pool = multiprocessing.Pool()

    results = pool.imap(Worker, tasks)

    total = 0
    num_valid = 0
    for prefix, count, valid_count in results:
        print(', '.join(map(hex, prefix)))
        total += count
        num_valid += valid_count

    print(total, 'instructions were processed')
    print(num_valid, 'valid instructions')
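
A note on result ordering: pool.imap, used here and in Examples #1 and #4, yields results in task submission order, which is why the prefixes print in the order CreateTraversalTasks produced them. Example #3 uses pool.imap_unordered instead, which yields each result as soon as its worker finishes; that suits it because it only accumulates counts and merges tries, and it avoids head-of-line blocking when tasks take uneven time.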
Example #3
# Assumed imports from the surrounding module (not shown in the excerpt):
# import multiprocessing, sys
# import dfa_parser, dfa_traversal, trie, validator
def main():
    # We keep these global to share the state graph between the workers
    # spawned by multiprocessing; passing it along with every task is slow.
    global options, xml_file
    global dfa
    global worker_validator
    options, xml_file = ParseOptions()
    dfa = dfa_parser.ParseXml(xml_file)
    worker_validator = validator.Validator(validator_dll=options.validator_dll,
                                           decoder_dll=options.decoder_dll)

    assert dfa.initial_state.is_accepting
    assert not dfa.initial_state.any_byte

    sys.stderr.write('%d states\n' % len(dfa.states))
    num_suffixes = dfa_traversal.GetNumSuffixes(dfa.initial_state)
    num_instructions = sum(
        num_suffixes[t.to_state]
        for t in dfa.initial_state.forward_transitions.values())
    sys.stderr.write('%d instructions\n' % num_instructions)
    tasks = dfa_traversal.CreateTraversalTasks(dfa.states, dfa.initial_state)
    sys.stderr.write('%d tasks\n' % len(tasks))

    pool = multiprocessing.Pool()
    results = pool.imap_unordered(Worker, tasks)

    total = 0
    num_valid = 0

    node_cache = trie.NodeCache()
    full_trie = node_cache.empty_node

    # The individual workers create subtries that we merge in and compress here.
    for count, valid_count, sub_trie in results:
        total += count
        num_valid += valid_count
        full_trie = node_cache.Merge(full_trie, sub_trie)
        sys.stderr.write('%.2f%% completed\n' %
                         (total * 100.0 / num_instructions))
    sys.stderr.write('%d instructions were processed\n' % total)
    sys.stderr.write('%d valid instructions\n' % num_valid)

    trie.WriteToFile(options.trie_path, full_trie)
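
The global-based sharing in Example #3 relies on workers being forked, so they inherit the parsed DFA for free; under the spawn start method (the default on Windows) children would see unset globals. A portable sketch, using the same names as the example, rebuilds the globals once per worker through the Pool initializer:

def InitWorker(xml_path, validator_dll, decoder_dll):
    # Runs once in each child process and recreates the state that the
    # fork-based version inherits from the parent.
    global dfa, worker_validator
    dfa = dfa_parser.ParseXml(xml_path)
    worker_validator = validator.Validator(validator_dll=validator_dll,
                                           decoder_dll=decoder_dll)

pool = multiprocessing.Pool(
    initializer=InitWorker,
    initargs=(xml_file, options.validator_dll, options.decoder_dll))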
Example #4
def main():
  assert dfa.initial_state.is_accepting
  assert not dfa.initial_state.any_byte

  print(len(dfa.states), 'states')

  num_suffixes = dfa_traversal.GetNumSuffixes(dfa.initial_state)

  # We can't just write 'num_suffixes[dfa.initial_state]' because the
  # initial state is accepting, so the empty suffix would be counted too.
  total_instructions = sum(
      num_suffixes[t.to_state]
      for t in dfa.initial_state.forward_transitions.values())
  print(total_instructions, 'regular instructions total')

  tasks = dfa_traversal.CreateTraversalTasks(dfa.states, dfa.initial_state)
  print(len(tasks), 'tasks')

  pool = multiprocessing.Pool()

  results = pool.imap(Worker, tasks)

  total = 0
  num_valid = 0
  errors = []
  for prefix, count, valid_count, more_errors in results:
    print(', '.join(map(hex, prefix)))
    total += count
    num_valid += valid_count
    errors += more_errors

  print(total, 'instructions were processed')
  print(num_valid, 'valid instructions')

  print(len(errors), 'errors')

  errors.sort()
  with open(options.errors, 'w') as errors_file:
    errors_file.write(
        'Instructions accepted by new validator but rejected by old one:\n')
    for disassembly, byte_sequence in errors:
      hex_bytes = ' '.join('%02x' % byte for byte in byte_sequence)
      errors_file.write('%-50s %s\n' % (disassembly, hex_bytes))
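
None of the excerpts show the script's entry point. Since every variant creates a multiprocessing.Pool, the call to main() belongs behind the standard guard; on spawn-based platforms each child re-imports the module, and the guard keeps that re-import from recursively starting pools:

if __name__ == '__main__':
    main()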