def load(fn_ins, simplify=False, corner=None, rm_zero=False):
    zero_names = OrderedSet()
    Ads, b = loadc_Ads_b(fn_ins, corner=corner, ico=True)
    if rm_zero:
        zero_names = rm_zero_cols(Ads)
    if simplify:
        print('Simplifying corner %s' % (corner, ))
        Ads, b = simplify_rows(Ads, b, remove_zd=False, corner=corner)
    return State(Ads, zero_names=zero_names)
def run(fout, fns_in, corner, verbose=0):
    Ads, b = loadc_Ads_b(fns_in, corner, ico=True)
    Ads, b = simplify_rows(Ads, b, corner=corner)
    fout.write('ico,fast_max fast_min slow_max slow_min,rows...\n')
    for row_b, row_ds in zip(b, Ads):
        # Write in the same format, but just stick to this corner
        out_b = [str(row_b) for _i in range(4)]
        ico = '1'
        items = [ico, ' '.join(out_b)]
        for k, v in sorted(row_ds.items()):
            items.append('%u %s' % (v, k))
        fout.write(','.join(items) + '\n')
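# A minimal, self-contained sketch of the line format written by run() above,
# using a made-up row. The wire names and the 120 value are hypothetical, not
# from real corner data; this only illustrates how one (row_b, row_ds) pair
# becomes a CSV line with the single corner value repeated for all four corners.
def _example_row_line():
    row_b = 120
    row_ds = {'CLBLL_L_A': 2, 'INT_R_FAN_ALT': 1}
    items = ['1', ' '.join([str(row_b)] * 4)]
    for k, v in sorted(row_ds.items()):
        items.append('%u %s' % (v, k))
    return ','.join(items)

# _example_row_line() == '1,120 120 120 120,2 CLBLL_L_A,1 INT_R_FAN_ALT'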
def load(fn_ins, simplify=False, corner=None):
    Ads, b = loadc_Ads_b(fn_ins, corner=corner, ico=True)
    if simplify:
        print('Simplifying corner %s' % (corner, ))
        Ads, b = simplify_rows(Ads, b, remove_zd=False, corner=corner)
    return State(Ads)
def massage_equations(Ads, b, verbose=False, corner=None):
    '''
    Subtract equations from each other to generate additional constraints.
    Helps provide additional guidance to the solver for realistic delays.

    Ex: given
        a >= 10
        a + b >= 100
    a valid solution is
        a = 100
    However, a better solution is something like
        a = 10
        b = 90
    This creates derived constraints to provide more realistic results.

    Equation pipeline:
    Some operations may generate new equations.
    Simplify after these to avoid unnecessary overhead on redundant constraints.
    Similarly, some operations may eliminate equations, potentially eliminating
    a column (ie variable).
    Remove these columns as necessary to speed up solving.
    '''
    assert len(Ads) == len(b), 'Ads, b length mismatch'

    def check_cols():
        assert len(index_names(Ads)) == cols

    def debug(what):
        check_cols()
        if 1 or verbose:
            print('')
            print_eqns(Ads, b, verbose=verbose, label=what, lim=20)
            col_dist(Ads, what)
            #check_feasible_d(Ads, b)

    # Try to (intelligently) subtract equations to generate additional constraints
    # This helps avoid putting all delay in a single shared variable
    dstart = len(b)
    cols = len(index_names(Ads))
    # Each iteration one more column is allowed until all columns are included
    # (and the system is stable)
    col_lim = 15
    di = 0
    while True:
        print('')
        n_orig = len(b)
        print('Loop %d, lim %d' % (di + 1, col_lim))

        # Meat of the operation
        Ads, b = derive_eq_by_row(Ads, b, col_lim=col_lim, cmp_heuristic=True)
        debug("der_rows")
        # Run another simplify pass since new equations may overlap with the originals
        Ads, b = simplify_rows(Ads, b, corner=corner)
        print('Derive row: %d => %d equations' % (n_orig, len(b)))
        debug("der_rows simp")

        # derive_cols is mostly a degenerate case of derive_rows
        # however, it will sub out single variables a lot faster if there are a lot of them
        # linear vs above quadratic, might as well keep it in
        if 1:
            n_orig2 = len(b)
            # Meat of the operation
            Ads, b = derive_eq_by_col(Ads, b)
            debug("der_cols")
            # Run another simplify pass since new equations may overlap with the originals
            Ads, b = simplify_rows(Ads, b, corner=corner)
            print('Derive col %d: %d => %d equations' % (di + 1, n_orig2, len(b)))
            debug("der_cols simp")

        # Doesn't help computation, but helps debugging
        Ads, b = sort_equations(Ads, b)
        debug("loop done")
        col_dist(Ads, 'derive done iter %d, lim %d' % (di, col_lim), lim=12)

        rows = len(Ads)
        # possible that a new equation was generated and taken away, but close enough
        if n_orig == len(b) and col_lim >= cols:
            break
        col_lim += col_lim // 5
        di += 1
    dend = len(b)
    print('')
    print('Derive net: %d => %d' % (dstart, dend))
    print('')

    # Was experimenting to see how much the higher order columns really help
    # Helps debug readability
    Ads, b = sort_equations(Ads, b)
    debug("final (sorted)")
    print('')
    print('Massage final: %d => %d rows' % (dstart, dend))
    cols_end = len(index_names(Ads))
    print('Massage final: %d => %d cols' % (cols, cols_end))
    assert cols_end == cols
    return Ads, b
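# A toy illustration of the subtraction described in the massage_equations()
# docstring, using the same dict-per-row representation (variable name -> count).
# This is a simplified sketch, not the actual derive_eq_by_row() implementation:
# it only shows how subtracting one constraint from another whose variables it
# contains yields the extra guidance constraint b >= 90 for the a = 10, b = 90
# style solution, per the docstring's example.
def _derive_example():
    # a >= 10 and a + b >= 100, as (row_ds, row_b) pairs
    (sub_ds, sub_b), (big_ds, big_b) = ({'a': 1}, 10), ({'a': 1, 'b': 1}, 100)
    # The smaller row's variables must be a subset of the larger row's
    assert set(sub_ds).issubset(big_ds)
    # (a + b) - (a) >= 100 - 10  =>  b >= 90
    derived_ds = {k: v - sub_ds.get(k, 0) for k, v in big_ds.items()}
    derived_ds = {k: v for k, v in derived_ds.items() if v}
    derived_b = big_b - sub_b
    return derived_ds, derived_b

# _derive_example() == ({'b': 1}, 90)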
def run(fns_in, corner, run_corner, sub_json=None, bounds_csv=None,
        dedup=True, massage=False, outfn=None, verbose=False, **kwargs):
    print('Loading data')
    Ads, b = loadc_Ads_b(fns_in, corner, ico=True)

    # Remove duplicate rows
    # is this necessary?
    # maybe better to just add them into the matrix directly
    if dedup:
        oldn = len(Ads)
        iold = instances(Ads)
        Ads, b = simplify_rows(Ads, b, corner=corner)
        print('Simplify %u => %u rows' % (oldn, len(Ads)))
        print('Simplify %u => %u instances' % (iold, instances(Ads)))

    if sub_json:
        print('Sub: %u rows' % len(Ads))
        iold = instances(Ads)
        names_old = index_names(Ads)
        run_sub_json(Ads, sub_json, verbose=verbose)
        names = index_names(Ads)
        print("Sub: %u => %u names" % (len(names_old), len(names)))
        print('Sub: %u => %u instances' % (iold, instances(Ads)))
    else:
        names = index_names(Ads)

    '''
    Substitution .csv
    Special .csv containing one variable per line
    Used primarily for multiple optimization passes, such as different
    algorithms or additional constraints
    '''
    if bounds_csv:
        Ads2, b2 = loadc_Ads_b([bounds_csv], corner, ico=True)
        bounds = Ads2bounds(Ads2, b2)
        assert len(bounds), 'Failed to load bounds'
        rows_old = len(Ads)
        Ads, b = filter_bounds(Ads, b, bounds, corner)
        print(
            'Filter bounds: %s => %s + %s rows' %
            (rows_old, len(Ads), len(Ads2)))
        Ads = Ads + Ads2
        b = b + b2

    assert len(Ads) or allow_zero_eqns()
    assert len(Ads) == len(b), 'Ads, b length mismatch'

    if verbose:
        print('')
        print_eqns(Ads, b, verbose=verbose)

        #print('')
        #col_dist(A_ubd, 'final', names)

    if massage:
        try:
            Ads, b = massage_equations(Ads, b, corner=corner)
        except SimplifiedToZero:
            if not allow_zero_eqns():
                raise
            print('WARNING: simplified to zero equations')
            Ads = []
            b = []

    print('Converting to numpy...')
    names, Anp = A_ds2np(Ads)
    run_corner(
        Anp,
        np.asarray(b),
        names,
        corner,
        outfn=outfn,
        verbose=verbose,
        **kwargs)
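# A minimal, self-contained sketch of the row-dict => numpy conversion that the
# final step above performs conceptually before handing off to run_corner().
# This assumes the (list of {name: count} dicts, parallel b vector) layout used
# throughout this file; it is NOT the repo's actual A_ds2np() implementation.
import numpy as np

def _example_ds2np(Ads, b):
    names = sorted(set(name for row in Ads for name in row))
    Anp = np.zeros((len(Ads), len(names)), dtype=float)
    for i, row in enumerate(Ads):
        for name, count in row.items():
            Anp[i][names.index(name)] = count
    return names, Anp, np.asarray(b, dtype=float)

# Example with made-up variable names:
#   names, Anp, bnp = _example_ds2np([{'a': 1}, {'a': 1, 'b': 1}], [10.0, 100.0])
#   => names == ['a', 'b'], Anp == [[1., 0.], [1., 1.]], bnp == [10., 100.]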