Example #1
def init_routing_vars(cgra : MRRG, design : Design, vars : Modeler, solver : Solver) -> Term:
    bv1 = solver.BitVec(1)
    for node in cgra.all_nodes:
        for value in design.values:
            vars.init_var((node, value), bv1)
            for dst in value.dsts:
                vars.init_var((node, value, dst), bv1)
    return solver.TheoryConst(solver.Bool(), True)
Example #2
def pe_exclusivity(cgra : MRRG, design : Design, vars : Modeler, solver : Solver) -> Term:
    ''' Assert each PE is used at most once '''
    bv = solver.BitVec(len(design.operations))
    c = []
    for pe in cgra.functional_units:
        pe_vars = vars.anonymous_var(bv)
        for idx, op in enumerate(design.operations):
            c.append(pe_vars[idx] == vars[pe, op])
        c.append(_is_one_hot_or_0(pe_vars, solver))

    return solver.And(c)
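Examples #2, #5, #6 and #8 rely on `_is_one_hot` / `_is_one_hot_or_0` helpers that are not shown on this page. A minimal sketch, assuming the bitvector terms overload `&`, `-` and `==` the way they overload `&`, `>>` and `+` in Example #20, and assuming the solver exposes a boolean `Not` (that name is an assumption), might look like:

def _is_one_hot_or_0(var, solver):
    # at most one bit set: x & (x - 1) == 0 (also holds for x == 0,
    # since the wrapped subtraction still ANDs down to 0)
    return (var & (var - 1)) == 0

def _is_one_hot(var, solver):
    # exactly one bit set: at most one bit set AND nonzero
    # (solver.Not is an assumed name for boolean negation in this API)
    return solver.And((var & (var - 1)) == 0, solver.Not(var == 0))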
Example #3
    def _modeler_factory(self):
        m = Modeler(
            #                    line_width=self.line_width,
            #                    arrhenius_plot_type=self.arrhenius_plot_type
        )
        bind_preference(m, 'logr_ro_line_width',
                        'pychron.mdd.logr_ro_line_width')
        bind_preference(m, 'arrhenius_plot_type', 'pychron.mdd.plot_type')
        bind_preference(m, 'clovera_directory', 'pychron.mdd.clovera_dir')
        bind_preference(m, 'data_directory', 'pychron.mdd.data_dir')

        return m
Example #4
    def __init__(
        self,
        cgra: MRRG,
        design: Design,
        solver_str: str,
        seed: int = 0,
        incremental: bool = False,
        duplicate_const: bool = False,
        duplicate_all: bool = False,
    ):

        if duplicate_all:
            for op in design.operations:
                op.allow_duplicate()
        elif duplicate_const:
            for op in design.operations:
                if op.opcode == 'const':
                    op.allow_duplicate()

        self._cgra = cgra
        self._design = design
        self._incremental = incremental

        self._solver = solver = smt(solver_str)
        self._solver_opts = solver_opts = [('random-seed', seed),
                                           ('produce-models', 'true')]
        if incremental:
            solver_opts.append(('incremental', 'true'))

        if solver_str == 'CVC4':
            if incremental:
                solver_opts.append(('bv-sat-solver', 'cryptominisat'))
            else:
                solver_opts.append(('bv-sat-solver', 'cadical'))
                #solver_opts.append(('bitblast', 'eager'))

        self._init_solver()
        self._vars = Modeler(solver)
        self._model = None
Example #5
def op_placement(cgra : MRRG, design : Design, vars : Modeler, solver : Solver) -> Term:
    ''' Assert each op is placed exactly once, unless it may be duplicated,
    in which case only assert that it is placed at least once '''
    bv = solver.BitVec(len(cgra.functional_units))
    c = []
    for op in design.operations:
        if op.duplicate:
            c.append(ft.reduce(solver.BVOr, (vars[pe, op] for pe in cgra.functional_units)) == 1)
        else:
            op_vars = vars.anonymous_var(bv)
            for idx,pe in enumerate(cgra.functional_units):
                c.append(op_vars[idx] == vars[pe, op])
            c.append(_is_one_hot(op_vars, solver))

    return solver.And(c)
Example #6
def route_exclusivity(cgra : MRRG, design : Design, vars : Modeler, solver : Solver) -> Term:
    '''
        each routing node is used for at most one value

        for all node in nodes:
            popcount(vars[node, value] for value in values) <= 1
    '''
    bv = solver.BitVec(len(design.values))
    c = []
    for node in cgra.all_nodes:
        node_vars = vars.anonymous_var(bv)
        for idx, value in enumerate(design.values):
            c.append(node_vars[idx] == vars[node, value])
        c.append(_is_one_hot_or_0(node_vars, solver))

    return solver.And(c)
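A hedged sketch of how the constraint generators above might be wired together, using the PNR class shown later in Example #22; `cgra`, `design` and the solver name are placeholders supplied by the surrounding project:

# Placeholder wiring; cgra and design are built elsewhere in the project.
pnr = PNR(cgra, design, 'Boolector', seed=0)
pnr.map_design(
    [init_placement_vars, init_routing_vars],            # declare the 1-bit variables
    [op_placement, pe_exclusivity, route_exclusivity],   # assert the constraints
    verbose=True,
)
if pnr.solve(verbose=True):
    pass  # a model is now stored and can be inspected via pnr.attest_design(...)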
Example #7
    def __init__(self, config):
        self.config = config
        metrics = Metrics().get()
        m = Modeler().get()
        loss = Loss().get()
        optimizer = Optimizer().get()
        console.log("Model has", m.count_params(), "params")
        m.compile(loss=loss, optimizer=optimizer, metrics=metrics)
        m.summary(line_length=150)
        self.model = m
        # need to know so that we can avoid rounding errors with spectrogram
        # this should represent how much the input gets downscaled
        # in the middle of the network
        self.peakDownscaleFactor = 4
Example #8
def output_connectivity(cgra : MRRG, design : Design, vars : Modeler, solver : Solver) -> Term:
    '''
        if a node is used to route a value, then exactly one of its outputs
        also routes that value
    '''
    c = []
    for node in cgra.all_nodes:
        bv = solver.BitVec(len(node.outputs.values()))
        for value in design.values:
            if isinstance(node, mrrg.FU_Port):
                continue
            for dst in value.dsts:
                v = vars[node, value, dst]
                i_vars = vars.anonymous_var(bv)
                for idx, n in enumerate(node.outputs.values()):
                    c.append(i_vars[idx] == vars[n, value, dst])
                c.append(solver.Or(v == 0, _is_one_hot(i_vars, solver)))

    return solver.And(c)
Example #9
def init_popcount_concat(
        node_filter : NodeFilter,
        cgra : MRRG,
        design : Design,
        vars : Modeler,
        solver : Solver) -> Term:

    nodes = [n for n in cgra.all_nodes if node_filter(n)]
    width = len(nodes).bit_length()
    zero = solver.TheoryConst(solver.BitVec(width - 1), 0)
    zeroExt = ft.partial(solver.Concat, zero)
    expr = ft.reduce(solver.BVAdd,
            map(zeroExt,
                (ft.reduce(solver.BVOr,
                    (vars[n, v] for v in design.values)
                ) for n in nodes)
            )
        )

    pop_count = vars.init_var(node_filter, expr.sort)
    return expr == pop_count
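For intuition, the zero-extend-and-add scheme in init_popcount_concat corresponds to the following plain-Python computation over ordinary ints (a sketch only, with hypothetical data and no solver terms):

# One "used" bit per filtered node: the OR of that node's per-value bits.
used = [1, 0, 1, 1, 0]                 # hypothetical example data
width = len(used).bit_length()         # enough bits to hold the count
# Zero-extending each 1-bit flag to `width` bits before adding means the
# sum can never overflow, so the result is the exact population count.
pop_count = sum(used)
assert pop_count == 3 and pop_count < (1 << width)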
Example #10
def main():

    # Process command-line args. 
    desc = 'This application runs an end-to-end MERRA/Max process.'

    parser = argparse.ArgumentParser(description = desc)
    
    parser.add_argument('-f',
                        required = True, 
                        help = 'path to file of presence points')
    
    parser.add_argument('-o', default = '.', help = 'path to output directory')
    
    parser.add_argument('-p', 
                        default = 10, 
                        help = 'number of concurrent processes to run')
    
    parser.add_argument('-s',
                        required = True,
                        help = 'species name')
    
    parser.add_argument('-t',
                        default = 10,
                        help='number of trials for selecting top-ten predictors')
    
    parser.add_argument('--startDate', help = 'MM-DD-YYYY')
    parser.add_argument('--endDate', help = 'MM-DD-YYYY')

    args = parser.parse_args()
    
    # Run the process.
    c = ConfigureMmxRun(args.f, args.startDate, args.endDate, args.s, args.o, 
                        args.p, args.t)
    
    GetMerra     (c.config.configFile).run()
    PrepareImages(c.config.configFile).run()
    PrepareTrials(c.config.configFile).run()
    RunTrials    (c.config.configFile).run()
    Selector     (c.config.configFile).run()
    Modeler      (c.config.configFile).run()
Example #11
def init_popcount_ite(
        node_filter : NodeFilter,
        cgra : MRRG,
        design : Design,
        vars : Modeler,
        solver : Solver) -> Term:

    nodes = [n for n in cgra.all_nodes if node_filter(n)]
    bv = solver.BitVec(len(nodes).bit_length())
    zero = solver.TheoryConst(bv, 0)
    one  = solver.TheoryConst(bv, 1)

    expr = ft.reduce(solver.BVAdd,
            map(lambda x : solver.Ite(x == 0, zero, one),
                (ft.reduce(solver.BVOr,
                    (vars[n, v] for v in design.values)
                ) for n in nodes)
            )
        )

    pop_count = vars.init_var(node_filter, bv)
    return expr == pop_count
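The Ite variant in init_popcount_ite expresses the same count differently: every node whose OR of per-value bits is nonzero contributes one. In plain Python, with hypothetical data (sketch only):

used_bits_per_node = [[1, 0], [0, 0], [1, 1]]   # hypothetical (node, value) bits
pop_count = sum(1 if any(bits) else 0 for bits in used_bits_per_node)
assert pop_count == 2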
Example #12
def main(name, maxeval, metric):
    """Triggers experiment looping through ML algorithms

    Args:
        name: name of experiment
        maxeval: maximum number of evaluation
        metric: name of metric to minimize cost function
    """
    mlflow.set_experiment(name)
    MAX_EVALS = maxeval
    METRIC = metric

    space = [{
        'max_depth': hp.choice('max_depth', range(1, 20)),
        'max_features': hp.choice('max_features', range(1, 26)),
        'n_estimators': hp.choice('n_estimators', range(100, 500)),
        'criterion': hp.choice('criterion', ["gini", "entropy"])
    }, {
        'var_smoothing':
        hp.uniform('var_smoothing', 0.000000001, 0.000001)
    }]

    X_train, X_test, y_train, y_test = Modeler().prepro()

    for index, algo in enumerate([RandomForestClassifier, GaussianNB]):
        with mlflow.start_run(run_name=str(algo)) as run:
            trials = Trials()
            train_objective = build_train_objective(algo, X_train, y_train,
                                                    X_test, y_test, METRIC)
            hyperopt.fmin(fn=train_objective,
                          space=space[index],
                          algo=hyperopt.tpe.suggest,
                          max_evals=MAX_EVALS,
                          trials=trials)
            log_best(run, METRIC)
            # search_run_id = run.info.run_id
            # experiment_id = run.info.experiment_id
            mlflow.end_run()
Example #13
def load_dataset(task: str, DELIMITER='#'):
    set_seeds()
    if task == "Amazon":
        df_train, df_dev, df_valid, df_test, df_test_heldout = load_amazon_dataset(
            delimiter=DELIMITER)

    elif task == "Youtube":
        df_train, df_dev, df_valid, df_test, df_test_heldout = load_youtube_dataset(
            delimiter=DELIMITER)

    elif task == "Film":
        df_train, df_dev, df_valid, df_test, df_test_heldout = load_film_dataset(
        )

    elif (task == "News") or (task == "Debug"):
        df_train, df_dev, df_valid, df_test, df_test_heldout = load_news_dataset(
        )

    global modeler
    modeler = Modeler(df_train, df_dev, df_valid, df_test, df_test_heldout)
    update_stats({}, "load_data")

    return (df_train, df_dev, df_valid, df_test)
Example #14
def init_placement_vars(cgra : MRRG, design : Design, vars : Modeler, solver : Solver) -> Term:
    bv1 = solver.BitVec(1)
    for pe in cgra.functional_units:
        for op in design.operations:
            vars.init_var((pe, op), bv1)
    return solver.TheoryConst(solver.Bool(), True)
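Once a model has been saved (see Example #22), the 1-bit placement and routing variables declared above can be read back through the model's items, in the same way the not_this helper in Example #22 inspects them. A hedged sketch, assuming only the model.items() interface shown there:

def placed_at(model, pe, op) -> bool:
    # Keys are the same tuples passed to init_var ((pe, op) or (node, value));
    # a value of 1 means that bit is set in the satisfying assignment.
    for key, val in model.items():
        if key == (pe, op):
            return val == 1
    return False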
Example #15
#!/usr/bin/env python
import roslib

roslib.load_manifest("perfect_model")

import time
import numpy as np
import cv2

import matplotlib.pyplot as plt

from modeler import Modeler

modeler = Modeler()


def main():
    print "OpenCV version " + cv2.__version__

    cap = cv2.VideoCapture(0)
    start = time.time()
    while (True):
        # Capture frame-by-frame
        ret, frame = cap.read()

        # Our operations on the frame come here
        gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
        modeler.run(gray)
        num_newpoints_list, percent_inliers_list = modeler.stats()
        # Display the resulting frame
        cv2.imshow('frame', gray)
Example #16
    model2 = Sequential()
    model2.add(Dense(128, activation='relu', input_dim=784))
    model2.add(Dense(10, activation='softmax'))
    model2.compile(optimizer='rmsprop',
                   loss='categorical_crossentropy',
                   metrics=['accuracy'])

    model3 = Sequential()
    model3.add(Dense(256, activation='relu', input_dim=784))
    model3.add(Dense(10, activation='softmax'))
    model3.compile(optimizer='rmsprop',
                   loss='categorical_crossentropy',
                   metrics=['accuracy'])

    return (model1, model2, model3)


if __name__ == "__main__":

    data = load_data()

    (model1, model2, model3) = generate_models()

    modeler = Modeler()
    modeler.add(model1)
    modeler.add(model2)
    modeler.add(model3)
    modeler.start(data, epochs=12)
    modeler.save(n_save=1)
Example #17
def test_modeler_model():
    Modeler().fit()
    assert os.path.isfile(modelpath)
Example #18
import pandas as pd
from plotter import plot_data_1, plot_data_2
from modeler import Modeler


if __name__ == '__main__':
    plot = True
    # plot = False

    # Filter dataframe
    pd.options.display.max_columns = None
    df = pd.read_csv('data/owid-covid-data.csv')
    columns_filter = ['iso_code', 'location', 'date', 'total_cases',
                      'new_cases', 'total_deaths', 'new_deaths']
    df_indo = df[df['location'] == 'Indonesia'][columns_filter]
    df_indo['date'] = pd.to_datetime(df_indo['date'], format='%Y-%m-%d')
    # print(df_indo)

    # Plot data
    if plot:
        save = True
        # save = False
        # Plot actual data
        plot_data_1(df_indo.iloc[30:], save)
        plot_data_2(df_indo.iloc[30:], 'new_cases', 'blue', save)
        plot_data_2(df_indo.iloc[30:], 'new_deaths', 'red', save)
        # Plot prediction data
        m = Modeler(df_indo.iloc[65:])
        m.plot_observed_and_expected_total_case(90, save)
        print()
        m.plot_observed_and_expected_new_case(90, save)
Example #19
async def on_message(message):
    bot_name = str(client.user).split("#")[0]
    target_name = str(message.author.name)
    str_author = str(message.author)
    prefix = "user_data"
    if message.author == client.user:
        return
    if message.channel.type == discord.ChannelType.private:
        print(target_name)
        print(active_sessions.keys())
        if target_name not in active_sessions.keys():
            # print("session not active")
            active_sessions[target_name] = False
            session_count[target_name] = 0
            msg_count[target_name] = 0

        if not active_sessions[target_name]:
            if f"Bonjour {bot_name}" in message.content:
                active_sessions[target_name] = True
                sentence_buffer[target_name] = ""
                if target_name not in target_modelers.keys():
                    target_modelers[target_name] = Modeler(target_name)
                    if not os.path.exists(f"{prefix}/{str_author}"):
                        os.makedirs(f"{prefix}/{str_author}")
                    target_modelers[target_name].save_profile(
                        f"{prefix}/{str_author}/{str_author}_profile.json")
                else:
                    target_modelers[target_name].load_profile(
                        f"{prefix}/{str_author}/{str_author}_profile.json")
                time.sleep(1)
                await message.channel.send(f"Bonjour {target_name} !")
                time.sleep(.7)
                await message.channel.send(
                    f"Je suis {bot_name}, le robot qui écoute les problèmes ! Mon rôle est de déchiffrer tes 'méta-programmes' afin d'identifier les meilleurs vecteurs d'amélioration selon ta personnalité."
                )
                time.sleep(1)
                await message.channel.send(
                    "Ainsi, j'aimerais que tu me parles d'un élément de ta vie que tu souhaiterais améliorer afin que l'on puisse ensemble l'analyser en profondeur. Cela peut être lié aux hobbies, au travail, aux relations ..."
                )
                time.sleep(1.5)
                await message.channel.send(
                    "Note: je ne réponds que lorsque que ton message sera terminé par un point."
                )
                session_count[target_name] += 1
                session_answerer = Answerer(session_count)
                session_answerer.load_answer_list("templates/meta_answers.csv")
                target_answerers[target_name] = session_answerer
                # TODO: Possibly ask whether past conversations should be taken into account when the session count > 1
                time.sleep(1.5)
                await message.channel.send(
                    f"De quoi allons-nous parler aujourd'hui ?")
                time.sleep(.7)
                await message.channel.send(
                    f"(Écrire 'Merci {bot_name}' pour mettre fin à la discussion)"
                )
            else:
                await message.channel.send(
                    f"Vous pouvez écrire 'Bonjour {bot_name}' pour lancer la discussion !"
                )
        else:
            if message.content == f"Merci {bot_name}":
                print("end_message")
                time.sleep(1)
                await message.channel.send(f"Bonne journée {target_name} !")
                active_sessions[target_name] = False
                if not os.path.exists(f"{prefix}/{str_author}"):
                    os.makedirs(f"{prefix}/{str_author}")
                print(target_modelers[target_name].profile)
                target_answerers[target_name].save_conversation_data(
                    f"{prefix}/{str_author}/{str_author}_{datetime.now()}_{session_count[target_name]}.csv"
                )
                target_modelers[target_name].save_profile(
                    f"{prefix}/{str_author}/{str_author}_profile.json")
            else:
                print("normal_message")
                session_answerer = target_answerers[target_name]
                session_modeler = target_modelers[target_name]
                if ("." in message.content) or ("!" in message.content) or (
                        "?" in message.content):
                    sentence_list = [
                        msg.strip()
                        for msg in re.split('[.!?]+', message.content)
                    ]
                    print(sentence_list)
                    if sentence_buffer[target_name] != "":
                        current_sentence = sentence_buffer[
                            target_name] + ' ' + sentence_list[0]
                    else:
                        current_sentence = sentence_list[0]
                    session_answerer.update_conversation(current_sentence)
                    session_modeler = session_modeler.update_profile(
                        current_sentence)
                    session_answerer.update_target_profile(
                        session_modeler.profile)
                    sentence_buffer[target_name] = ""
                    msg_count[target_name] += len(sentence_list[:-1])
                    for current_sentence in sentence_list[1:-1]:
                        session_answerer.update_conversation(current_sentence)
                        session_modeler = session_modeler.update_profile(
                            current_sentence)
                        session_answerer.update_target_profile(
                            session_modeler.profile)
                    if sentence_list[-1] == "":
                        sentence_buffer[target_name] = ""
                        session_answerer.nb_answers = msg_count[target_name]
                        response = session_answerer.get_answer()
                        response_time = max(
                            1.0, 0.2 * len(message.content.split(" ")))
                        time.sleep(response_time)
                        await message.channel.send(response)
                    else:
                        sentence_buffer[target_name] = sentence_list[-1]
                else:
                    if sentence_buffer[target_name] != "":
                        sentence_buffer[target_name] = sentence_buffer[
                            target_name] + ' ' + message.content
                    else:
                        sentence_buffer[target_name] = message.content

    else:
        await message.channel.send(f"Venez discutez par message privé !")
Example #20
def init_popcount_bithack(
        node_filter : NodeFilter,
        cgra : MRRG,
        design : Design,
        vars : Modeler,
        solver : Solver) -> Term:

    def _build_grouped_mask(k, n):
        '''
        build_grouped_mask :: int -> int -> Term
        returns the unique int m of length n that matches the following RE
        ((0{0,k} 1{k}) | (1{0,k})) (0{k} 1{k})*
        '''
        m = 0
        for i in range(k):
            m |= 1 << i
        c = 2*k
        while c < n:
            m |= m << c
            c *= 2
        return solver.TheoryConst(solver.BitVec(n), m)

    def _is_power_of_2(x : int) -> bool:
        return x & (x - 1) == 0

    def _floor_log2(x : int) -> int:
        return x.bit_length() - 1

    def _prev_power_of_2(x : int) -> int:
        return 1 << _floor_log2(x - 1)

    def _next_power_of_2(x : int) -> int:
        return 1 << x.bit_length()


    constraints = []
    vs = [vars[n, v] for n in cgra.all_nodes if node_filter(n) for v in design.values]
    width = len(vs)
    # build a bitvector from the concatenation of the bits
    bv = vars.anonymous_var(solver.BitVec(width))

    for idx,v in enumerate(vs):
        constraints.append(bv[idx] == v)

    # Boolector can't handle lshr on non power of 2, so zero extend
    if solver.solver_name == 'Boolector' and not _is_power_of_2(width):
        l = _next_power_of_2(width)
        bv = solver.Concat(solver.TheoryConst(solver.BitVec(l - width), 0), bv)

    width = bv.sort.width
    pop_count = vars.init_var(node_filter, bv.sort)

    if width <= 1:
        constraints.append(pop_count == bv)
        return solver.And(constraints)
    elif width == 2:
        constraints.append(pop_count == (bv & 1) + (bv >> 1))
        return solver.And(constraints)

    max_shift = _prev_power_of_2(width)

    def _mask_shift_add(x, shift):
        mask = _build_grouped_mask(shift, width)
        return (x & mask) + ((x >> shift) & mask)

    shifts = it.takewhile(lambda n : n <= max_shift, (1 << i for i in it.count()))
    x = ft.reduce(_mask_shift_add, shifts, bv)

    constraints.append(pop_count == x)
    return solver.And(constraints)
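For intuition, here is the same mask/shift/add popcount over ordinary Python ints; the SMT example above builds the identical dataflow over bitvector terms (this is a sketch for illustration, not the project's code):

def _grouped_mask(k: int, n: int) -> int:
    # n-bit mask of alternating k-bit groups, with ones in the low group
    # (k=1, n=8 -> 0b01010101; k=2, n=8 -> 0b00110011)
    m = (1 << k) - 1
    c = 2 * k
    while c < n:
        m |= m << c
        c *= 2
    return m

def popcount(x: int, width: int) -> int:
    shift = 1
    while shift < width:                     # shifts 1, 2, 4, ... below width
        mask = _grouped_mask(shift, width)
        # sum adjacent groups of `shift` bits into groups of 2*shift bits
        x = (x & mask) + ((x >> shift) & mask)
        shift <<= 1
    return x

assert popcount(0b10110101, 8) == 5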
Example #21
def test_modeler_output(df):
    assert Modeler().predict(df) in (0,1)
Example #22
class PNR:
    _cgra: MRRG
    _design: Design
    _vars: Modeler
    _solver: smt_switch_types.Solver
    _model: tp.Optional[Model]
    _solver_opts: tp.List[tp.Tuple[str, str]]
    _incremental: bool

    def __init__(
        self,
        cgra: MRRG,
        design: Design,
        solver_str: str,
        seed: int = 0,
        incremental: bool = False,
        duplicate_const: bool = False,
        duplicate_all: bool = False,
    ):

        if duplicate_all:
            for op in design.operations:
                op.allow_duplicate()
        elif duplicate_const:
            for op in design.operations:
                if op.opcode == 'const':
                    op.allow_duplicate()

        self._cgra = cgra
        self._design = design
        self._incremental = incremental

        self._solver = solver = smt(solver_str)
        self._solver_opts = solver_opts = [('random-seed', seed),
                                           ('produce-models', 'true')]
        if incremental:
            solver_opts.append(('incremental', 'true'))

        if solver_str == 'CVC4':
            if incremental:
                solver_opts.append(('bv-sat-solver', 'cryptominisat'))
            else:
                solver_opts.append(('bv-sat-solver', 'cadical'))
                #solver_opts.append(('bitblast', 'eager'))

        self._init_solver()
        self._vars = Modeler(solver)
        self._model = None

    def _check_pigeons(self) -> bool:
        op_hist = Counter()
        pe_hist = Counter()
        for op in self.design.operations:
            op_hist[op.opcode] += 1

        for pe in self.cgra.functional_units:
            for op in pe.ops:
                pe_hist[op] += 1
        for op, n in op_hist.items():
            if pe_hist[op] < n:
                return False
        else:
            return True

    def _reset(self) -> None:
        self._vars.reset()
        self._solver.Reset()
        self._init_solver()
        self._model = None

    def _init_solver(self) -> None:
        solver = self._solver

        solver.SetLogic('QF_BV')
        for opts in self._solver_opts:
            solver.SetOption(*opts)

        if self._solver.solver_name == 'Boolector':
            if self._incremental:
                self._solver._solver._btor.Set_sat_solver("Lingeling")
            else:
                self._solver._solver._btor.Set_sat_solver("CaDiCaL")

    def pin_module(self, module, placement):
        raise NotImplementedError()

    def pin_tie(self, tie, placement):
        raise NotImplementedError()

    def map_design(self,
                   init_funcs: ConstraintGeneratorList,
                   funcs: ConstraintGeneratorList,
                   verbose: bool = False) -> None:
        solver = self._solver
        args = self.cgra, self.design, self._vars, solver

        if verbose:
            for f in it.chain(init_funcs, funcs):
                print(f.__qualname__, end='... ', flush=True)
                c = f(*args)
                print('done', flush=True)
                solver.Assert(c)
        else:
            for f in it.chain(init_funcs, funcs):
                solver.Assert(f(*args))

    def satisfy_design(
        self,
        init_funcs: ConstraintGeneratorList,
        funcs: ConstraintGeneratorList,
        verbose: bool = False,
        attest_func: tp.Optional[ModelReader] = None,
        first_cut: tp.Optional[tp.Callable[[int, int], int]] = None,
        build_timer: tp.Optional[Timer] = None,
        solve_timer: tp.Optional[Timer] = None,
    ) -> bool:
        pass

    def optimize_enum(
        self,
        optimizer: optimization.Optimizer,
        init_funcs: ConstraintGeneratorList,
        funcs: ConstraintGeneratorList,
        verbose: bool = False,
        attest_func: tp.Optional[ModelReader] = None,
        build_timer: tp.Optional[Timer] = None,
        solve_timer: tp.Optional[Timer] = None,
        cutoff: tp.Optional[float] = None,
        return_bounds: bool = False,
        max_sol: int = 5000,
    ) -> bool:
        if not verbose:
            log = lambda *args, **kwargs: None
        else:
            log = ft.partial(print, sep='', flush=True)

        if not self._check_pigeons():
            log('Infeasible: too many pigeons')
            if return_bounds:
                return (False, None, None)
            else:
                return False

        solver = self._solver
        vars = self._vars
        cgra = self.cgra
        design = self.design
        args = cgra, design, vars, solver
        incremental = self._incremental

        if attest_func is None:
            attest_func: ModelReader = lambda *args: True

        if build_timer is None:
            build_timer = NullTimer()

        if solve_timer is None:
            solve_timer = NullTimer()

        if cutoff is None:

            def check_cutoff(lower, upper):
                return False
        elif cutoff == 0:

            def check_cutoff(lower, upper):
                return lower < upper
        else:

            def check_cutoff(lower, upper):
                return (upper - lower) / upper > cutoff

        if incremental:
            sat_cb = lambda: None
        else:
            sat_cb = self._reset

        def apply(*funcs: ConstraintGeneratorType):
            if not funcs:
                return
            log('Building constraints:')
            build_timer.start()
            for f in funcs:
                log('  ', f.__qualname__, end='... ')
                solver.Assert(f(*args))
                log('done')

            build_timer.stop()
            log('---\n')

        def do_checksat():
            solve_timer.start()
            s = solver.CheckSat()
            solve_timer.stop()
            return s

        def not_this(model: Model, vars: Modeler) -> None:
            build_timer.start()
            solver = vars._solver
            kx = []
            for k, v in model.items():
                n = k[0]
                if v == 1 and len(k) == 2 and optimizer.node_filter(n):
                    kx.append(k)
            assert kx
            t = ft.reduce(solver.BVAnd, (vars[k] for k in kx))
            solver.Assert(t == 0)
            build_timer.stop()

        eval_func = optimizer.eval_func
        lower_func = optimizer.lower_func

        funcs = *init_funcs, *funcs

        apply(*funcs)

        if incremental:
            funcs = ()

        if do_checksat():
            init_time = solve_timer.total + build_timer.total
            print(init_time)
            lower = 0
            sol = 1
            best = m = vars.save_model()
            upper = eval_func(cgra, design, best)
            if check_cutoff(lower, upper):
                lower = lower_func(cgra, design)

            attest_func(cgra, design, best)

            log(f'bounds: [{lower}, {upper}]')

            while check_cutoff(lower, upper) \
                and solve_timer.times[-1] + build_timer.times[-1] <= init_time \
                and solve_timer.total + build_timer.total < init_time * 100:
                apply(*funcs)
                not_this(m, vars)

                if do_checksat():
                    m = vars.save_model()
                    e = eval_func(cgra, design, m)
                    if upper > e:
                        log(f'\nnew model eval: {e}')
                        upper = e
                        best = m
                    elif upper == e:
                        log('=', end='')
                    else:
                        log('+', end='')

                    sol += 1
                else:
                    log('solutions exhausted')
                    break

            self._model = best
            log(f'optimal found: {upper}')
            if return_bounds:
                return (True, lower, upper)
            else:
                return True
        else:
            if return_bounds:
                return (False, None, None)
            else:
                return False

    def optimize_design(
        self,
        optimizer: optimization.Optimizer,
        init_funcs: ConstraintGeneratorList,
        funcs: ConstraintGeneratorList,
        verbose: bool = False,
        attest_func: tp.Optional[ModelReader] = None,
        first_cut: tp.Optional[tp.Callable[[int, int], int]] = None,
        build_timer: tp.Optional[Timer] = None,
        solve_timer: tp.Optional[Timer] = None,
        cutoff: tp.Optional[float] = None,
        return_bounds: bool = False,
        optimize_final: bool = False,
    ) -> bool:

        if not verbose:
            log = lambda *args, **kwargs: None
        else:
            log = ft.partial(print, sep='', flush=True)

        if not self._check_pigeons():
            log('Infeasible: too many pigeons')
            if return_bounds:
                return (False, None, None)
            else:
                return False

        solver = self._solver
        vars = self._vars
        cgra = self.cgra
        design = self.design
        args = cgra, design, vars, solver
        incremental = self._incremental

        if attest_func is None:
            attest_func: ModelReader = lambda *args: True

        if first_cut is None:
            first_cut = lambda l, u: int(max(u - 1, (u + l) / 2))

        if build_timer is None:
            build_timer = NullTimer()

        if solve_timer is None:
            solve_timer = NullTimer()

        if cutoff is None:

            def check_cutoff(lower, upper):
                return False
        elif cutoff == 0:

            def check_cutoff(lower, upper):
                return lower < upper
        else:

            def check_cutoff(lower, upper):
                return (upper - lower) / upper > cutoff

        if incremental:
            sat_cb = solver.Push

            def unsat_cb():
                solver.Pop()
                solver.Push()
        else:
            sat_cb = self._reset
            unsat_cb = self._reset

        def apply(
                *funcs: ConstraintGeneratorType
        ) -> tp.List[smt_switch_types.Term]:
            log('Building constraints:')
            build_timer.start()
            for f in funcs:
                log('  ', f.__qualname__, end='... ')
                solver.Assert(f(*args))
                log('done')
            build_timer.stop()
            log('---\n')

        def do_checksat():
            solve_timer.start()
            s = solver.CheckSat()
            solve_timer.stop()
            if s:
                log('sat')
            else:
                log('unsat')
            return s

        eval_func = optimizer.eval_func
        limit_func = optimizer.limit_func
        lower_func = optimizer.lower_func

        if cutoff is None and not optimize_final:
            funcs = *init_funcs, *funcs
        else:
            funcs = *init_funcs, optimizer.init_func, *funcs

        apply(*funcs)

        if incremental:
            funcs = ()

        if do_checksat():
            lower = 0
            best = vars.save_model()
            upper = eval_func(cgra, design, best)
            if check_cutoff(lower, upper) or optimize_final:
                lower = lower_func(cgra, design)
            next = first_cut(lower, upper)
            attest_func(cgra, design, best)
            sat_cb()

            next_f = None

            if not check_cutoff(lower, upper) and optimize_final:
                optimize_final = False

                def check_cutoff(lower, upper):
                    return lower < upper

                log('freezing placement')
                if incremental:
                    next_f = optimization.freaze_fus(best)
                else:
                    funcs = *funcs, optimization.freaze_fus(best)

            while check_cutoff(lower, upper):
                assert lower <= next <= upper
                log(f'bounds: [{lower}, {upper}]')
                log(f'next: {next}\n')

                f = limit_func(lower, next)
                if next_f is None:
                    apply(*funcs, f)
                else:
                    apply(*funcs, next_f, f)

                if do_checksat():
                    best = vars.save_model()
                    upper = eval_func(cgra, design, best)
                    attest_func(cgra, design, best)
                    sat_cb()
                else:
                    lower = next + 1
                    unsat_cb()
                next = int((upper + lower) / 2)

                next_f = None
                if not check_cutoff(lower, upper) and optimize_final:
                    optimize_final = False

                    def check_cutoff(lower, upper):
                        return lower < upper

                    log('freezing placement')
                    if incremental:
                        next_f = optimization.freaze_fus(best)
                    else:
                        funcs = *funcs, optimization.freaze_fus(best)

            self._model = best
            log(f'optimal found: {upper}')
            if return_bounds:
                return (True, lower, upper)
            else:
                return True
        else:
            if return_bounds:
                return (False, None, None)
            else:
                return False

    def solve(self, *, verbose: bool = False):
        if not self._check_pigeons():
            if verbose:
                print('Infeasible: too many pigeons', flush=True)
            return False
        solver = self._solver
        if verbose:
            print('Solving ...', flush=True)

        if not solver.CheckSat():
            solver.Reset()
            self._init_solver()
            return False

        self._model = self._vars.save_model()
        return True

    def attest_design(self, *funcs: ModelReader, verbose: bool = False):
        model = self._model
        assert model is not None
        args = self.cgra, self.design, model

        if verbose:
            for f in funcs:
                print(f.__qualname__, flush=True)
                f(*args)
        else:
            for f in funcs:
                f(*args)

    @property
    def cgra(self) -> MRRG:
        return self._cgra

    @property
    def design(self) -> Design:
        return self._design
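The bound-tightening loop in optimize_design above is essentially a binary search for the smallest achievable cost. A plain-Python sketch of that loop, where `solve_at_most` is a stand-in for the Assert(limit_func(lower, next)) + CheckSat() round trip (the name and the toy callback are assumptions, not part of the project):

def minimize(lower, upper, solve_at_most,
             first_cut=lambda l, u: int(max(u - 1, (u + l) / 2))):
    # `upper` starts at the cost of a known-feasible model, `lower` at a proven
    # lower bound; each round either shrinks `upper` (sat) or raises `lower` (unsat).
    # The real code additionally sets `upper` to the cost of the model it actually
    # found, which may be smaller than the bound that was asserted.
    nxt = first_cut(lower, upper)
    while lower < upper:
        if solve_at_most(nxt):
            upper = nxt          # a model of cost <= nxt exists
        else:
            lower = nxt + 1      # no model of cost <= nxt exists
        nxt = int((upper + lower) / 2)
    return upper

# Toy usage: the true optimum is 3.
assert minimize(0, 10, lambda bound: 3 <= bound) == 3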