Example #1
    def get_parameters_from_cache(self,
                                  t: int,
                                  start: int = 0,
                                  n_best_parameters: int = 1):
        # Rebuild the loss and the optimizer so that a previously cached
        # optimization result can be looked up for this (t, start) pair.
        loss = self._Loss(self, t, start=start)
        optimizer = Optimizer(self.dimensions,
                              cache_path=self.cache_path,
                              **self._optimizer_kwargs)

        cached_result = optimizer.load_cached_result(loss)

        if n_best_parameters == 1:
            return get_optimal_parameters(cached_result)

        return get_n_best_parameters(n_best_parameters, cached_result)
Example #2
    def __init__(self):
        super(Beamline, self).__init__()
        self.voltages = Voltages()

        # Shared, process-safe state for the control process set up below.
        self.manager = mp.Manager()
        self.current = mp.Value('d', 0.0)
        self.current_std = mp.Value('d', 0.0)
        self.stamp = mp.Value('d', 0.0)
        self.readback = self.manager.dict()
        self.setpoints = self.manager.dict()
        for i in range(10):
            name = 'Control_{}'.format(i)
            self.voltages[name] = Voltage(name=name)
            self.readback[name] = 0
            self.setpoints[name] = 0

        self.last = 0
        self.max = 0
        self.optimalTime = time.time()
        self.optimalSettings = self.voltages.setpoints

        self.continueScanning = False

        self.data = pd.DataFrame()

        self.optimizer = Optimizer(self)

        self.makeControlProcess()

        th.Timer(0, self.update).start()
        self.stop = False
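
A self-contained sketch of the shared-state pattern this constructor relies on: a process-safe mp.Value scalar plus a Manager dict, both visible to a worker process. All names and values here are illustrative.

import multiprocessing as mp

def worker(current, readback):
    # Both objects are shared with the parent process.
    current.value = 1.5
    readback['Control_0'] = 42

if __name__ == '__main__':
    manager = mp.Manager()
    current = mp.Value('d', 0.0)            # process-safe double
    readback = manager.dict({'Control_0': 0})
    p = mp.Process(target=worker, args=(current, readback))
    p.start()
    p.join()
    print(current.value, readback['Control_0'])   # 1.5 42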
Example #4
class Control:
    def __init__(self, models, prediction_horizon, history_length,
                 subsampling_rate, initial_controls, number_of_controls,
                 number_of_variables):
        self.models = models
        self.prediction_horizon = prediction_horizon
        self.history_length = history_length
        self.initial_controls = initial_controls
        self.interpolated_controls = self.initial_controls
        self.initial_optimization_values = np.tile(self.initial_controls,
                                                   prediction_horizon)
        self.subsampling_rate = subsampling_rate
        self.number_of_controls = number_of_controls
        self.number_of_variables = number_of_variables
        self.interpolated_tick = 0
        self.optimizer = Optimizer()
        self.predictions = list()
        prediction.initialize_models()

    def get_next_actuators(self, full_track_history, current_tick):
        print(current_tick)  # debug trace of the current tick
        if current_tick < self.subsampling_rate:
            # Before the first optimization step, fall back to the
            # initial controls.
            current_actuators = self.initial_controls
        elif current_tick > 0 and current_tick % self.subsampling_rate == 0:
            # A new optimization step starts here; the modulo is 0 in this
            # branch, so reset the interpolation index.
            self.interpolated_tick = 0
            history_array = prediction.prepare_tick_history(
                full_track_history.get_history(self.history_length,
                                               self.subsampling_rate))
            predictions, optimized_actuators = self.optimizer.run_optimizer(
                self.models, history_array, self.prediction_horizon,
                self.initial_optimization_values, self.history_length,
                current_tick, self.subsampling_rate, self.number_of_controls,
                self.number_of_variables)
            self.predictions.append(predictions)
            current_actuators = optimized_actuators[:self.number_of_controls - 1]
            # Linearly interpolate each control between this step's and the
            # next step's optimized value; row k holds the whole control
            # vector at interpolation step k.
            self.interpolated_controls = np.transpose(
                np.array([
                    np.linspace(i, j, self.subsampling_rate + 1)
                    for i, j in zip(
                        optimized_actuators[:self.number_of_controls - 1],
                        optimized_actuators[(self.number_of_controls - 1):(
                            2 * (self.number_of_controls - 1))])
                ]))
            # Warm-start the next optimization: drop the first optimized
            # control vector (hard-coded as 3 values here) and append the
            # initial controls at the end of the horizon.
            self.initial_optimization_values = np.concatenate(
                [optimized_actuators[3:], self.initial_controls])
        else:
            # Between optimization steps, walk along the precomputed
            # interpolation between the last two optimized control vectors.
            self.interpolated_tick = current_tick % self.subsampling_rate
            current_actuators = self.interpolated_controls[
                self.interpolated_tick]
        return current_actuators

    def write_prediction(self, index):
        # Stack the index-th prediction from every optimization run into a
        # single matrix and write it as CSV.
        rows = [prediction_run[index]
                for optimization_run in self.predictions
                for prediction_run in optimization_run]
        np.savetxt('prediction_' + str(index) + '.txt', np.vstack(rows),
                   delimiter=',')
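
The transpose-of-linspaces trick in get_next_actuators is easier to see in isolation. A self-contained numpy sketch of the same step, with made-up numbers:

import numpy as np

# Two consecutive optimized control vectors (3 controls each, made up).
first = np.array([0.0, 1.0, 2.0])
second = np.array([3.0, 4.0, 5.0])
subsampling_rate = 4

# One linspace per control, then transpose so that row k holds the whole
# control vector at interpolation step k; shape (subsampling_rate + 1, 3).
interpolated = np.transpose(np.array([
    np.linspace(i, j, subsampling_rate + 1) for i, j in zip(first, second)
]))
print(interpolated[0])    # == first
print(interpolated[-1])   # == second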
Example #5
    def fit(self,
            t: int,
            start: int = 0,
            n_best_parameters: int = 1,
            **kwargs) -> Union[List[float], List[Tuple[float, List[float]]]]:
        loss = self._Loss(self, t, start=start)
        optimizer = Optimizer(self.dimensions,
                              cache_path=self.cache_path,
                              **self._optimizer_kwargs)

        previous_parameters = self.get_previous_parameters(t, start=t)

        optimization_result = optimizer.optimize(
            loss, initial_parameter_points=previous_parameters, **kwargs)

        self.results[(t, start)] = optimization_result
        self.parameters = get_optimal_parameters(optimization_result)

        if n_best_parameters == 1:
            return get_optimal_parameters(optimization_result)

        return get_n_best_parameters(n_best_parameters, optimization_result)
Example #6
    def __init__(self,
                 T=None,
                 n=None,
                 pi="uniform",
                 massmatrix=None,
                 eigensolver=ScipySchur(),
                 optimizer=Optimizer()):
        self.T = T
        self.n = n
        self.pi = get_pi(T, pi)
        self.massmatrix = massmatrix
        self.eigensolver = eigensolver
        self.optimizer = optimizer
        if T is not None:
            self.solve()
Example #7
def train(args):
    bert_config = BertConfig(args.bert_config_path)
    bert_config.print_config()

    if not (args.do_train or args.do_test):
        raise ValueError("At least one of args `do_train` and `do_test` "
                         "must be True.")

    trainer_count = fluid.dygraph.parallel.Env().nranks

    task_name = args.task_name.lower()
    processors = {
        'xnli': reader.XnliProcessor,
        'cola': reader.ColaProcessor,
        'mrpc': reader.MrpcProcessor,
        'mnli': reader.MnliProcessor,
    }

    processor = processors[task_name](data_dir=args.data_dir,
                                      vocab_path=args.vocab_path,
                                      max_seq_len=args.max_seq_len,
                                      do_lower_case=args.do_lower_case,
                                      in_tokens=args.in_tokens,
                                      random_seed=args.random_seed)
    num_labels = len(processor.get_labels())
    shuffle_seed = 1 if trainer_count > 1 else None

    train_data_generator = processor.data_generator(
                                      batch_size=args.batch_size,
                                      phase='train',
                                      epoch=args.epoch,
                                      dev_count=trainer_count,
                                      shuffle=args.shuffle,
                                      shuffle_seed=shuffle_seed)
    num_train_examples = processor.get_num_examples(phase='train')
    max_train_steps = args.epoch * num_train_examples // args.batch_size // trainer_count
    warmup_steps = int(max_train_steps * args.warmup_proportion)

    print("Device count: %d" % dev_count)
    print("Trainer count: %d" % trainer_count)
    print("Num train examples: %d" % num_train_examples)
    print("Max train steps: %d" % max_train_steps)
    print("Num warmup steps: %d" % warmup_steps)

    with fluid.dygraph.guard(place):

        if args.use_data_parallel:
            strategy = fluid.dygraph.parallel.prepare_context()

        cls_model = ClsModelLayer(
                            args,
                            bert_config,
                            num_labels,
                            is_training=True,
                            return_pooled_out=True)

        optimizer = Optimizer(
                    warmup_steps=warmup_steps,
                    num_train_steps=max_train_steps,
                    learning_rate=args.learning_rate,
                    model_cls=cls_model,
                    weight_decay=args.weight_decay,
                    scheduler=args.lr_scheduler,
                    loss_scaling=args.loss_scaling,
                    parameter_list=cls_model.parameters())

        if args.init_pretraining_params:
            print("Load pre-trained model from %s" % args.init_pretraining_params)
            init_from_static_model(args.init_pretraining_params, cls_model, bert_config)

        if args.use_data_parallel:
            cls_model = fluid.dygraph.parallel.DataParallel(cls_model, strategy)
            train_data_generator = fluid.contrib.reader.distributed_batch_reader(
                train_data_generator)

        steps = 0
        time_begin = time.time()

        for batch in train_data_generator():
            data_ids = create_data(batch)
            loss, accuracy, num_seqs = cls_model(data_ids)

            optimizer.optimization(loss,
                                   use_data_parallel=args.use_data_parallel,
                                   model=cls_model)
            cls_model.clear_gradients()

            if steps != 0 and steps % args.skip_steps == 0:
                time_end = time.time()
                used_time = time_end - time_begin
                current_example, current_epoch = processor.get_train_progress()
                localtime = time.asctime(time.localtime(time.time()))
                print("%s, epoch: %s, steps: %s, dy_graph loss: %f, acc: %f, speed: %f steps/s" % (localtime, current_epoch, steps, loss.numpy(), accuracy.numpy(), args.skip_steps / used_time))
                time_begin = time.time()

            if steps != 0 and steps % args.save_steps == 0 and fluid.dygraph.parallel.Env().local_rank == 0:
                save_path = os.path.join(args.checkpoints, "steps" + "_" + str(steps))
                fluid.save_dygraph(
                    cls_model.state_dict(),
                    save_path)
                fluid.save_dygraph(
                    optimizer.optimizer.state_dict(),
                    save_path)
                print("Save model parameters and optimizer status at %s" % save_path)

            steps += 1

        if fluid.dygraph.parallel.Env().local_rank == 0:
            save_path = os.path.join(args.checkpoints, "final")
            fluid.save_dygraph(
                cls_model.state_dict(),
                save_path)
            fluid.save_dygraph(
                optimizer.optimizer.state_dict(),
                save_path)
            print("Save model parameters and optimizer status at %s" % save_path)
        return cls_model
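
For reference, a self-contained sketch of an argparse namespace covering the attributes train actually reads (plus the use_cuda flag assumed above); every default here is an invented placeholder, not the original script's value.

import argparse

parser = argparse.ArgumentParser()
for flag in ["--bert_config_path", "--data_dir", "--vocab_path",
             "--task_name", "--init_pretraining_params", "--checkpoints",
             "--lr_scheduler"]:
    parser.add_argument(flag, type=str, default="")
for flag in ["--max_seq_len", "--random_seed", "--batch_size", "--epoch",
             "--skip_steps", "--save_steps"]:
    parser.add_argument(flag, type=int, default=1)
for flag in ["--warmup_proportion", "--learning_rate", "--weight_decay",
             "--loss_scaling"]:
    parser.add_argument(flag, type=float, default=0.1)
for flag in ["--do_train", "--do_test", "--do_lower_case", "--in_tokens",
             "--shuffle", "--use_data_parallel", "--use_cuda"]:
    parser.add_argument(flag, action="store_true")
args = parser.parse_args(["--do_train", "--task_name", "xnli"])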
Example #8
# -*- coding: utf-8 -*-
"""
Created on Sun Sep 29 21:37:49 2019

@author: jezequel
"""
from objects import Accessory, Gear, Drive, Reductor, ShaftAssembly, Shaft, Generator
from optimization import Optimizer
import matplotlib.pyplot as plt
from matplotlib import patches

engine = Accessory(diameter=0.30, length=0.050, speed=4000, name='Engine')
accessories = [Accessory(diameter=0.15, length=0.050, speed=2000, name='Fuel Pump'),
               Accessory(diameter=0.12, length=0.050, speed=3500, name='Starter'),
               Accessory(diameter=0.18, length=0.050, speed=750, name='Oil Pump')]

generator = Generator(engine, accessories)

limits = {'minimum': {'x': 0.2, 'y': 0.05},
          'maximum': {'x': 0.7, 'y': 0.3}}

results = []
for reductor in generator.reductors:
    optimizer = Optimizer(reductor=reductor, limits=limits)

    res = optimizer.minimize(1000)
    print(res.success)
    print([gear.diameter for sa in optimizer.reductor.shaft_assemblies for gear in sa.gears])
    print(optimizer.reductor.speeds())
    results.append(optimizer.reductor)
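
The script imports matplotlib and patches but the excerpt never draws anything. A self-contained sketch of how gears could be rendered as circles inside the stated limits; the centres and diameters below are made up:

import matplotlib.pyplot as plt
from matplotlib import patches

fig, ax = plt.subplots()
# Made-up gear centres and diameters, standing in for optimizer.reductor.
for (x, y), diameter in [((0.25, 0.10), 0.15), ((0.40, 0.10), 0.12)]:
    ax.add_patch(patches.Circle((x, y), diameter / 2, fill=False))
ax.set_xlim(0.2, 0.7)
ax.set_ylim(0.05, 0.3)
ax.set_aspect('equal')
plt.show()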
Example #9
    answer = yesno_input(
        "Do you want to be able to trash the data for every region that is scanned? (y/n) "
    )
    thrash_data = (answer == "y")

    optimizationScheme = input(
        "What version of the optimization routine do you want? Normal (0), Automatic quality rating (1), Fully automated (2) "
    )
    while optimizationScheme not in ["0", "1", "2"]:
        print("Sorry, what did you say? ")
        optimizationScheme = input(
            "What version of the optimization routine do you want? Normal (0), Automatic quality rating (1), Fully automated (2) "
        )
    if optimizationScheme == "0":
        OPT = Optimizer(config,
                        config_conf,
                        config_sted,
                        thrash_data=thrash_data)
    elif optimizationScheme == "1":
        autoquality = yesno_input(
            "Do you want to change the parameters of the QualityNet? (y/n) ")
        if autoquality == "y":
            for key in config["autoquality"]:
                val = input("What should be the {}? ".format(key))
                if key == "IP":
                    config["autoquality"][key] = val
                else:
                    config["autoquality"][key] = int(val)
        OPT = Optimizer(config,
                        config_conf,
                        config_sted,
                        autoquality=True,