def test_loop(opts, name='anon', verify=False, quiet=False, on_error=None):
    '''
    Determine whether this sequence of optimizations loops.

    Composes the sequence, then self-composes each resulting optimization;
    a composition that does not grow its source pattern and is satisfiable
    is reported as a loop. Returns True on the first loop found, else False.
    '''
    logger = logging.getLogger('test_loops.test_loop')
    logger.info('%s: checking for %s-cycle', name, len(opts))

    for comp in compose_sequence(opts, on_error=on_error):
        if verify:
            verify_opt(comp, quiet)
        src_size = count_src(comp)

        for self_comp in loops.all_bin_compositions(comp, comp, on_error):
            if verify:
                verify_opt(self_comp, quiet)

            self_src_size = count_src(self_comp)
            if self_src_size > src_size:
                # Source pattern grew under self-composition: not a cycle.
                logger.info('%s increases: %s -> %s',
                            comp.name, src_size, self_src_size)
                continue

            if not loops.satisfiable(self_comp):
                logger.info('%s unsatisfiable', comp.name)
                continue

            logger.info('loop:\n%s\n%s\n', comp, self_comp)
            return True

    return False
def search_process(suite, limit, sequence_queue, result_queue, status_queue, log_config):
    '''
    Worker process: test optimization sequences pulled from sequence_queue.

    Reads index tuples from sequence_queue (None is the shutdown sentinel),
    composes the corresponding optimizations from the suite file, and
    self-composes each result looking for loops. Found loops are written to
    result_queue as formatted strings; the info counter list is pushed to
    status_queue before exit. Stops once `limit` compositions were tested.
    '''
    logging.config.dictConfig(log_config)
    log = logger.getChild('search_process')
    log.info('Worker thread started, limit %s', limit)

    # Close the suite file deterministically instead of leaking the handle
    # (the previous open(suite).read() relied on GC to close it).
    with open(suite) as f:
        opts = loops.parse_transforms(f.read())
    log.debug('%s optimizations', len(opts))

    # Counters indexed by the module-level COUNT/SAT_CHECKS/CYCLES/ERRORS
    # constants (log line below suggests that order).
    info = [0, 0, 0, 0]

    def count_error(e, o1, o2):
        # Composition-error callback: just tally the failure.
        info[ERRORS] += 1

    while info[COUNT] < limit:
        s = sequence_queue.get()
        if s is None:  # shutdown sentinel
            log.info('Worker exiting %s', info)
            status_queue.put(info)
            return

        log.debug('Checking sequence %s', s)
        os = tuple(opts[i] for i in s)
        for o in loops.all_compositions(os):
            if info[COUNT] >= limit:
                break
            o_src = count_src(o)
            for oo in loops.all_bin_compositions(o, o, count_error):
                if info[COUNT] >= limit:
                    break
                if info[COUNT] % 1000 == 0:
                    log.info('Tested %s SatChecks %s Loops %s Errors %s', *info)
                info[COUNT] += 1

                oo_src = count_src(oo)
                if o_src < oo_src:
                    # Self-composition grew the source pattern: cannot loop.
                    continue

                info[SAT_CHECKS] += 1
                if not loops.satisfiable(oo):
                    continue

                info[CYCLES] += 1
                # TODO: put found loops into a queue
                result = 'Loop: {}\n{}\n\n{}'.format(
                    o.name, '\n\n'.join(str(op) for op in os), o)
                result_queue.put(result)
                log.info(result)

    log.info('Worker exiting %s', info)
    status_queue.put(info)
def main(): logging.basicConfig(filename='find-loops.log', filemode='w') parser = argparse.ArgumentParser() parser.add_argument('length', type=int, help='Length of cycles to search for') parser.add_argument('file', type=argparse.FileType('r'), help='optimization suite to analyze') args = parser.parse_args() if args.length < 1: sys.stderr.write('cycle length must be positive\n') exit(1) sys.stderr.write('Reading ' + args.file.name + '\n') opts = loops.parse_transforms(args.file.read()) sys.stderr.write('{} optimizations'.format(len(opts))) count = 0 sat_checks = 0 cycles = 0 errors = [0] def count_error(e,o1,o2): errors[0] += 1 for o,_,os in search(opts, args.length): o_src = count_src(o) for oo in loops.all_bin_compositions(o,o,count_error): sys.stderr.write(status.format(count, sat_checks, cycles)) count += 1 oo_src = count_src(oo) if o_src < oo_src: continue sat_checks += 1 if not loops.satisfiable(oo): continue cycles += 1 print '\n-----\nLoop: ', o.name for opt in os: opt.dump() print o.dump() sys.stderr.write(status.format(count, sat_checks, cycles)) sys.stderr.write('\n') print print 'final count', count print 'loops', cycles print 'sat_checks', sat_checks print 'errors', errors[0]
def search_process(suite, length, prefix_queue, result_queue, status_queue, log_config):
    '''
    Worker process: explore sequence prefixes pulled from prefix_queue.

    For each prefix (None is the shutdown sentinel), enumerates completions
    via search_after_prefix, self-composes each composition, and reports
    satisfiable non-growing compositions as loops on result_queue. Counters
    (indexed by the module-level SEQS/COMPS/SELF_COMPS/SAT_CHECKS/CYCLES/
    ERRORS constants) go to status_queue on exit; stops after MAX_TESTS
    compositions.
    '''
    logging.config.dictConfig(log_config)
    log = logger.getChild("search_process")
    log.info("Worker thread started")

    # Close the suite file deterministically instead of leaking the handle
    # (the previous open(suite).read() relied on GC to close it).
    with open(suite) as f:
        opts = loops.parse_transforms(f.read())
    log.debug("%s optimizations", len(opts))

    info = [0] * INFO_FLDS

    def count_error(e, o1, o2):
        # Composition-error callback: just tally the failure.
        info[ERRORS] += 1

    while info[COMPS] < MAX_TESTS:
        p = prefix_queue.get()
        if p is None:  # shutdown sentinel
            log.info("Worker exiting %s", info)
            status_queue.put(info)
            prefix_queue.task_done()  # make sure this happens after putting the info
            return

        log.info("Checking prefix %s", p)
        for o, os in search_after_prefix(opts, length, p, count_error):
            o_src = count_src(o)
            info[COMPS] += 1
            if info[COMPS] % 1000 == 0:
                log.info("Seqs %s Comps %s SelfComps %s SatChecks %s Loops %s Errors %s",
                         *info)

            for oo in loops.all_bin_compositions(o, o, count_error):
                info[SELF_COMPS] += 1
                oo_src = count_src(oo)
                if o_src < oo_src:
                    # Self-composition grew the source pattern: cannot loop.
                    continue

                info[SAT_CHECKS] += 1
                if not loops.satisfiable(oo):
                    continue

                info[CYCLES] += 1
                # TODO: put found loops into a queue
                result = "Loop: {}\n{}\n\n{}".format(
                    o.name, "\n\n".join(str(op) for op in os), o)
                result_queue.put(result)
                log.info(result)

        info[SEQS] += prefix_size(p, length, len(opts))
        prefix_queue.task_done()

    log.info("Worker exiting %s", info)
    status_queue.put(info)
def search_process(suite, length, limit, prefix_queue, result_queue, status_queue, log_config):
    '''
    Worker process: explore sequence prefixes pulled from prefix_queue,
    bounded by an explicit composition limit.

    Like the MAX_TESTS variant, but takes the limit as a parameter and
    tracks per-prefix enumeration position in `counters` so a prefix cut
    short by the limit can be accounted with partial_prefix_size instead of
    prefix_size. Counters list `info` is pushed to status_queue on exit.
    '''
    logging.config.dictConfig(log_config)
    log = logger.getChild('search_process')
    log.info('Worker thread started')

    # Close the suite file deterministically instead of leaking the handle
    # (the previous open(suite).read() relied on GC to close it).
    with open(suite) as f:
        opts = loops.parse_transforms(f.read())
    log.debug('%s optimizations', len(opts))

    info = [0] * INFO_FLDS

    def count_error(e, o1, o2):
        # Composition-error callback: just tally the failure.
        info[ERRORS] += 1

    # Cleared only when the limit interrupts a prefix; the while condition
    # then fails, so the flag never needs resetting for a later prefix.
    complete = True
    while info[COMPS] < limit:
        p = prefix_queue.get()
        if p is None:  # shutdown sentinel
            log.info('Worker exiting %s', info)
            status_queue.put(info)
            prefix_queue.task_done()  # make sure this happens after putting the info
            return

        log.info('Checking prefix %s; %s remaining', p, limit - info[COMPS])
        counters = [0] * length  # filled in by search_after_prefix
        for o, os in search_after_prefix(opts, length, p, counters, count_error):
            o_src = count_src(o)
            info[COMPS] += 1
            if info[COMPS] % 1000 == 0:
                log.info('Paths %s Comps %s Selfcomps %s SatChecks %s Loops %s Errors %s',
                         *info)

            for oo in loops.all_bin_compositions(o, o, count_error):  # TODO: can this just be compose?
                info[SELFCOMPS] += 1
                oo_src = count_src(oo)
                if o_src < oo_src:
                    # Self-composition grew the source pattern: cannot loop.
                    continue

                info[SAT_CHECKS] += 1
                if not loops.satisfiable(oo):
                    continue

                info[CYCLES] += 1
                # TODO: put found loops into a queue
                result = 'Loop: {}\n{}\n\n{}'.format(
                    o.name, '\n\n'.join(str(op) for op in os), o)
                result_queue.put(result)
                log.info(result)

            if info[COMPS] >= limit:
                complete = False
                break

        if complete:
            psize = prefix_size(p, length, len(opts))
            log.debug('Prefix %s size %s counters %s', p, psize, counters)
            info[SEQS] += psize
        else:
            # Prefix was cut short: only count the sequences actually visited.
            psize = partial_prefix_size(p, counters, len(opts))
            info[SEQS] += psize
            log.debug('Prefix %s size %s Info %s Counters %s', p, psize, info, counters)
        prefix_queue.task_done()

    log.info('Worker exiting %s', info)
    status_queue.put(info)