Example no. 1
    def write_parameters_table_to_tensorboard(self):
        """
        Write the summaries and the static and dynamic hyperparameters to the table in tensorboard's hparams
        section. This method is called once, to create the hparams table.
        """
        # Check whether there are any hyperparameters to track:
        if (len(self._static_hyperparameters) == 0
                and len(self._dynamic_hyperparameters) == 0):
            return

        # Prepare the static hyperparameters values:
        non_graph_parameters = {"Date": str(datetime.now()).split(".")[0]}
        hp_param_list = [hp_api.HParam("Date")]
        for parameter, value in self._static_hyperparameters.items():
            non_graph_parameters[parameter] = value
            hp_param_list.append(hp_api.HParam(parameter))

        # Prepare the summaries values and the dynamic hyperparameters values (both registered as metrics):
        graph_parameters = {}
        hp_metric_list = []
        for metric in self._training_results:
            for prefix in ["training", "validation"]:
                metric_name = f"{self._Sections.SUMMARY}/{prefix}_{metric}"
                graph_parameters[metric_name] = 0.0
                hp_metric_list.append(hp_api.Metric(metric_name))
        for parameter, epochs in self._dynamic_hyperparameters.items():
            parameter_name = f"{self._Sections.HYPERPARAMETERS}/{parameter}"
            graph_parameters[parameter_name] = epochs[-1]
            hp_metric_list.append(hp_api.Metric(parameter_name))

        # Write the hyperparameters and summaries to the table:
        with self._file_writer.as_default():
            hp_api.hparams_config(hparams=hp_param_list,
                                  metrics=hp_metric_list)
            hp_api.hparams(non_graph_parameters, trial_id=self._run_name)
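The table written above only fills in once each trial logs scalar summaries whose tags match the registered hp_api.Metric names. A minimal sketch of that pairing, assuming the summary section resolves to a tag such as 'Summary/training_loss' and using an illustrative log directory:

import tensorflow as tf

# Hedged sketch: log one trial's metric under a tag matching an
# hp_api.Metric registered by write_parameters_table_to_tensorboard().
with tf.summary.create_file_writer("logs/run-0").as_default():
    tf.summary.scalar("Summary/training_loss", 0.42, step=1)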
Example no. 2
    def tuning(self):
        # Hyperparameter settings
        HP_init = hp.HParam('init_lr', hp.Discrete([1E-5, 1E-4, 1E-3]))
        HP_optimizer = hp.HParam('optimizer', hp.Discrete(["RMSPROP", "ADAM_W"]))
        HP_scheduler = hp.HParam('lr_scheduler', hp.Discrete(['constant', 'piecewise_decay',
                                                                'linear_decay', 'cosine_decay_restart']))
        HP_batch = hp.HParam('batch_size', hp.Discrete([32, 64, 128]))
        HP_weight_decay = hp.HParam('weight_decay', hp.Discrete([1E-5, 5E-5, 1E-4]))
        session_num = 0

        for init_lr in HP_init.domain.values:
            for optimizer in HP_optimizer.domain.values:
                for lr_scheduler in HP_scheduler.domain.values:
                    for batch_size in HP_batch.domain.values:
                        for weight_decay in HP_weight_decay.domain.values:
                            hparams = {
                                HP_init : init_lr,
                                HP_optimizer: optimizer,
                                HP_scheduler: lr_scheduler,
                                HP_batch: batch_size,
                                HP_weight_decay : weight_decay
                            }
                            run_name = "run-%d" % session_num
                            print('--- Starting trial: %s' % run_name)
                            print({h.name: hparams[h] for h in hparams})
                            self.run(session_num, hparams)
                            session_num += 1
Example no. 3
 def __init__(self,
              lr_range=(1.e-8, 1.e8),
              rho_range=(0., 1.),
              momentum_range=(0., 1.),
              epsilon_range=(0., 1.)):
     super(RMSpropManager, self).__init__()
     self._hparam = {
         "optimizer":
         hp.HParam("optimizer",
                   domain=hp.Discrete(['RMSProp']),
                   display_name="Optimizer"),
         "learning_rate":
         hp.HParam("learning_rate",
                   domain=hp.RealInterval(*lr_range),
                   display_name="Learning rate"),
         "rho":
         hp.HParam("rho",
                   domain=hp.RealInterval(*rho_range),
                   display_name="Rho"),
         "momentum":
         hp.HParam("momentum",
                   domain=hp.RealInterval(*momentum_range),
                   display_name="Momentum rate"),
         "epsilon":
         hp.HParam("epsilon",
                   domain=hp.RealInterval(*epsilon_range),
                   display_name="Epsilon"),
     }
Example no. 4
	def __init__(self, teacher_forcing=False):
		# number of units in 1st and 2nd LSTM layer, and the next dense layer
		self.num_units_l1 = hp.HParam('num_units_l1', hp.Discrete([32, 64]))
		self.num_units_l2 = hp.HParam('num_units_l2', hp.Discrete([16, 32, 64]))
		self.num_units_l3 = hp.HParam('num_units_l3', hp.Discrete([32, 64]))
		self.dropout = hp.HParam('dropout', hp.Discrete([0.3, 0.4]))
		
		#self.learning_rate = hp.HParam('learning_rate', hp.RealInterval(0.01, 0.5))
		self.optimizer = hp.HParam('optimizer', hp.Discrete(['adam', 'sgd']))
		
		# NOTE: this maps each HParam to itself; per-trial values are
		# presumably substituted before logging with hp.hparams().
		self.hparams = {
			self.optimizer: self.optimizer,
			self.num_units_l1: self.num_units_l1,
			self.num_units_l2: self.num_units_l2,
			self.num_units_l3: self.num_units_l3,
			self.dropout: self.dropout,
		}
		
		self.teacher_forcing = teacher_forcing
		
		self.model = None
		
		METRIC_ACCURACY = 'accuracy'
		
		self.timestamp = int(time())
		print("MODEL INIT TIME: ", str(self.timestamp))
		self.log_dir = "./tf_logs/lstm_classification_" + str(self.timestamp) +"/"
		with tf.summary.create_file_writer(self.log_dir).as_default():
			hp.hparams_config(
				hparams=[self.optimizer, self.num_units_l1, self.num_units_l2,
					self.num_units_l3, self.dropout],
				metrics=[hp.Metric(METRIC_ACCURACY, display_name='Accuracy')])
		
		return
Example no. 5
def _create_hparams_config(searchspace):
    hparams = []

    for key, val in searchspace.names().items():
        if val == "DOUBLE":
            hparams.append(
                hp.HParam(
                    key,
                    hp.RealInterval(float(searchspace.get(key)[0]),
                                    float(searchspace.get(key)[1])),
                ))
        elif val == "INTEGER":
            hparams.append(
                hp.HParam(
                    key,
                    hp.IntInterval(
                        searchspace.get(key)[0],
                        searchspace.get(key)[1]),
                ))
        elif val in ("DISCRETE", "CATEGORICAL"):
            hparams.append(hp.HParam(key, hp.Discrete(searchspace.get(key))))

    return hparams
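For illustration, a hedged sketch of the searchspace contract this function assumes: names() maps each parameter to its type tag and get() returns the bounds (or the value list). The stub class is hypothetical, not part of the original API:

class _StubSearchspace:
    # Hypothetical stand-in mirroring the interface used above.
    def names(self):
        return {"learning_rate": "DOUBLE", "batch_size": "CATEGORICAL"}

    def get(self, key):
        return {"learning_rate": [1e-4, 1e-1],
                "batch_size": [32, 64, 128]}[key]

hparams = _create_hparams_config(_StubSearchspace())
# hparams[0].domain is hp.RealInterval(0.0001, 0.1)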
Example no. 6
 def evaluate(self):
     if self._test:
         if self._test_labels is not None:
             predictions = self._pred_model.predict(
                 self._test_ds, steps=self._test_steps)
             test_accuracy = np.mean(predictions == self._test_labels)
             self._record_scalars(epoch_end_test_accuracy=test_accuracy)
             
             if self._old_test_labels is not None:
                 nmi = normalized_mutual_info_score(self._test_labels, self._old_test_labels,
                                                    average_method="arithmetic")
                 self._record_scalars(test_nmi=nmi)
                 
     if self._downstream_labels is not None:
         # choose the hyperparameters to record
         if not hasattr(self, "_hparams_config"):
             from tensorboard.plugins.hparams import api as hp
             hparams = {
                 hp.HParam("pca_dim", hp.IntInterval(0, 1000000)):self.config["pca_dim"],
                 hp.HParam("k", hp.IntInterval(1, 1000000)):self.config["k"],
                 hp.HParam("mult", hp.IntInterval(1, 1000000)):self.config["mult"],
                 hp.HParam("sobel", hp.Discrete([True, False])):self.input_config["sobel"]
                 }
             for e, d in enumerate(self.config["dense"]):
                 hparams[hp.HParam("dense_%s"%e, hp.IntInterval(1, 1000000))] = d
         else:
             hparams = None
         self._linear_classification_test(hparams)
Example no. 7
    def tuning(self):
        self.HP_NUM_UNITS = hp.HParam('num_units', hp.Discrete([3, 6]))
        self.HP_DROPOUT = hp.HParam('dropout', hp.RealInterval(0.1, 0.2))
        self.HP_OPTIMIZER = hp.HParam('optimizer', hp.Discrete(['adam',
                                                                'sgd']))

        self.METRIC_ACCURACY = 'accuracy'

        with tf.summary.create_file_writer('logs/hparam_tuning').as_default():
            hp.hparams_config(
                hparams=[
                    self.HP_NUM_UNITS, self.HP_DROPOUT, self.HP_OPTIMIZER
                ],
                metrics=[
                    hp.Metric(self.METRIC_ACCURACY, display_name='Accuracy')
                ],
            )
        session_num = 0

        for num_units in self.HP_NUM_UNITS.domain.values:
            for dropout_rate in (self.HP_DROPOUT.domain.min_value,
                                 self.HP_DROPOUT.domain.max_value):
                for optimizer in self.HP_OPTIMIZER.domain.values:
                    hparams = {
                        self.HP_NUM_UNITS: num_units,
                        self.HP_DROPOUT: dropout_rate,
                        self.HP_OPTIMIZER: optimizer,
                    }
                    run_name = "run-%d" % session_num
                    print('--- Starting trial: %s' % run_name)
                    print({h.name: hparams[h] for h in hparams})
                    self.run('logs/hparam_tuning/' + run_name, hparams)
                    session_num += 1
Example no. 8
def hp_tuning(estimator_input, input_width, model_path):

    HP_NUM_UNITS = hp.HParam('num_units', hp.Discrete([128, 256, 512, 1024]))
    HP_DROPOUT = hp.HParam('dropout', hp.Discrete([0.0, 0.05, 0.1, 0.5]))
    HP_LEARNING_RATE = [0.00001, 0.0005, 0.001]
    HP_ACTIVATION = hp.HParam('activation', hp.Discrete(['relu', 'tanh']))

    METRIC_ACCURACY = 'accuracy'

    with tf.summary.create_file_writer(model_path + '/logs/').as_default():
        hp.hparams_config(
            # register all swept HParams; learning_rate is a plain list
            # above, so it has no HParam to register
            hparams=[HP_NUM_UNITS, HP_DROPOUT, HP_ACTIVATION],
            metrics=[hp.Metric(METRIC_ACCURACY, display_name='Accuracy')],
        )
    session_num = 0

    for num_units in HP_NUM_UNITS.domain.values:
        for dropout_rate in HP_DROPOUT.domain.values:
            for activation in HP_ACTIVATION.domain.values:
                for learning_rate in HP_LEARNING_RATE:
                    hparams = {
                        'num_units': num_units,
                        'dropout': dropout_rate,
                        'activation': activation,
                        'learning_rate': learning_rate
                    }
                    run_name = id_from_hp(hparams)
                    print('--- Starting trial: %s' % run_name)
                    print(hparams)
                    run(model_path + '/logs/' + run_name, hparams,
                        estimator_input, input_width)
                    session_num += 1
Example no. 9
    def __init__(self, logdir: str,
                 hparams: Dict[str, Union[Tuple[float, float],
                                          List]], metrics: Dict[str, str]):
        self._hparams = []
        for name, param in hparams.items():
            if isinstance(param, tuple):
                low, high = param
                if isinstance(low, float):
                    self._hparams.append(
                        hp.HParam(
                            name, hp.RealInterval(min_value=low,
                                                  max_value=high)))
                elif isinstance(low, int):
                    self._hparams.append(
                        hp.HParam(name,
                                  hp.IntInterval(min_value=low,
                                                 max_value=high)))
            elif isinstance(param, list):
                self._hparams.append(hp.HParam(name, hp.Discrete(param)))

        self._metrics = metrics
        self._writer = tf.summary.create_file_writer(logdir=logdir)
        with self._writer.as_default():
            hp.hparams_config(
                hparams=self._hparams,
                metrics=[
                    hp.Metric(name, display_name=display)
                    for name, display in metrics.items()
                ],
            )
Example no. 10
 def test_duplicate_hparam_names_from_two_objects(self):
   hparams = {
       hp.HParam("foo"): 1,
       hp.HParam("foo"): 1,
   }
   with six.assertRaisesRegex(
       self, ValueError, "multiple values specified for hparam 'foo'"):
     hp.KerasCallback(self.get_temp_dir(), hparams)
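For contrast with the failing case above, a minimal sketch of the intended use of hp.KerasCallback; the model and log directory are illustrative assumptions:

import tensorflow as tf
from tensorboard.plugins.hparams import api as hp

hparams = {hp.HParam("foo"): 1}  # unique name, so no ValueError
model = tf.keras.Sequential([tf.keras.layers.Dense(1)])
model.compile(optimizer="adam", loss="mse")
# The callback logs the hparam values when training starts.
model.fit(tf.zeros((4, 3)), tf.zeros((4, 1)), epochs=1, verbose=0,
          callbacks=[hp.KerasCallback("logs/run-0", hparams)])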
Example no. 11
def log_hyperparameters():
    """

    Blueprint for hyperparameter and metric logging in tensorboard during hyperparameter tuning

    Returns:
        logparams (list): List containing the hyperparameters to log in tensorboard.
        metrics (list): List containing the metrics to log in tensorboard.

    """

    logparams = [
        hp.HParam(
            "latent_dim",
            hp.Discrete([2, 4, 6, 8, 12, 16]),
            display_name="latent_dim",
            description="encoding size dimensionality",
        ),
        hp.HParam(
            "n_components",
            hp.IntInterval(min_value=1, max_value=25),
            display_name="n_components",
            description="latent component number",
        ),
        hp.HParam(
            "gram_weight",
            hp.RealInterval(min_value=0.0, max_value=1.0),
            display_name="gram_weight",
            description="weight of the gram loss",
        ),
    ]

    metrics = [
        hp.Metric(
            "val_number_of_populated_clusters",
            display_name="number of populated clusters",
        ),
        hp.Metric(
            "val_reconstruction_loss",
            display_name="reconstruction loss",
        ),
        hp.Metric(
            "val_gram_loss",
            display_name="gram loss",
        ),
        hp.Metric(
            "val_vq_loss",
            display_name="vq loss",
        ),
        hp.Metric(
            "val_total_loss",
            display_name="total loss",
        ),
    ]

    return logparams, metrics
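A minimal usage sketch for the returned lists; the log directory is an assumption:

import tensorflow as tf
from tensorboard.plugins.hparams import api as hp

logparams, metrics = log_hyperparameters()
with tf.summary.create_file_writer("logs/hparam_tuning").as_default():
    hp.hparams_config(hparams=logparams, metrics=metrics)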
Example no. 12
def make_hparam_list():
    # Hyperparameter settings
    HP_init = hp.HParam('init_lr', hp.Discrete([1E-5, 1E-4, 1E-3]))
    HP_optimizer = hp.HParam('optimizer', hp.Discrete(["RMSPROP", "ADAM_W"]))
    HP_scheduler = hp.HParam('lr_scheduler', hp.Discrete(['constant', 'piecewise_decay',
                                                            'linear_decay', 'cosine_decay_restart']))
    HP_batch = hp.HParam('batch_size', hp.Discrete([32, 64, 128]))
    HP_weight_decay = hp.HParam('weight_decay', hp.Discrete([1E-5, 5E-5, 1E-4]))
    # batch norm: momentum=0.99, epsilon=1E-3 (Keras default settings)
    return [HP_init, HP_optimizer, HP_scheduler, HP_batch, HP_weight_decay]
Example no. 13
 def __init__(self, lr_range=(1.e-8, 1.), momentum_range=(0., 1.)):
     super(SGDMomentumManager, self).__init__(lr_range=lr_range)
     self._hparam['momentum'] = hp.HParam(
         'momentum',
         description="Momentum Rate",
         domain=hp.RealInterval(*momentum_range))
     self._hparam['nesterov'] = hp.HParam(
         'nesterov',
         description="Momentum variant (standard or Nesterov)",
         domain=hp.Discrete(['standard', 'nesterov']))
Example no. 14
def init_hyperparam_space(logdir, hparams, metrics):
    # Add dataset and model as hyperparameters
    hparams = [
        hp.HParam('dataset', hp.Discrete(medical_ts_datasets.builders)),
        hp.HParam('model', hp.Discrete(seft.models.__all__))
    ] + list(hparams)
    sess = tf.compat.v1.keras.backend.get_session()
    with tf.compat.v2.summary.create_file_writer(logdir).as_default() as w:
        sess.run(w.init())
        sess.run(hp.hparams_config(hparams=hparams, metrics=metrics))
        sess.run(w.flush())
Example no. 15
	def __init__(self, parent=None):
		super(MainWindow, self).__init__(parent)
		self.setupUi(self)
		self.onBindingUI()
		
		self.batch_size = 32
		self.learning_rate = 0.001
		self.optimizer = 'sgd'
		
		self.HP_BATCH_SIZE = hp.HParam('batch_size', hp.Discrete([self.batch_size]))
		self.HP_LEARNING_RATE = hp.HParam('learning_rate', hp.RealInterval(self.learning_rate, self.learning_rate))
		self.HP_OPTIMIZER = hp.HParam('optimizer', hp.Discrete([self.optimizer]))
Example no. 16
 def __init__(self, lr_range=(1.e-8, 1.)):
     """Create a SGD manager with a specific learning rate range"""
     super(SGDManager, self).__init__()
     self._hparam = {
         "optimizer":
         hp.HParam("optimizer",
                   domain=hp.Discrete(['SGD']),
                   display_name="Optimizer"),
         "learning_rate":
         hp.HParam("learning_rate",
                   domain=hp.RealInterval(*lr_range),
                   display_name="Learning rate"),
     }
Example no. 17
    def evaluate(self, avpool=True):
        predictions = self._models["full"].predict(self._val_ds)
        num_categories = len(self.categories)

        for i in range(num_categories):
            category = self.categories[i]
            preds = predictions[:, i]
            y_true = self._val_labels[:, i]

            acc = np.mean(y_true == (preds >= 0.5).astype(int))

            auc = roc_auc_score(y_true, preds)
            self._record_scalars(**{
                f"val_accuracy_{category}": acc,
                f"val_auc_{category}": auc
            },
                                 metric=True)

        # choose the hyperparameters to record
        if not hasattr(self, "_hparams_config"):
            from tensorboard.plugins.hparams import api as hp

            metrics = []
            for c in self.categories:
                metrics.append(f"val_accuracy_{c}")
                metrics.append(f"val_auc_{c}")

            hparams = {
                hp.HParam("lam", hp.RealInterval(0., 10000.)):
                self.config["lam"],
                hp.HParam("tau", hp.RealInterval(0., 10000.)):
                self.config["tau"],
                hp.HParam("mu", hp.RealInterval(0., 10000.)):
                self.config["mu"],
                hp.HParam("batch_size", hp.RealInterval(0., 10000.)):
                self.input_config["batch_size"],
                hp.HParam("lr", hp.RealInterval(0., 10000.)):
                self.config["lr"],
                hp.HParam("lr_decay", hp.RealInterval(0., 10000.)):
                self.config["lr_decay"],
                hp.HParam("decay_type",
                          hp.Discrete(["cosine", "exponential", "staircase"])):
                self.config["decay_type"],
                hp.HParam("opt_type", hp.Discrete(["sgd", "adam", "momentum"])):
                self.config["opt_type"],
                hp.HParam("weight_decay", hp.RealInterval(0., 10000.)):
                self.config["weight_decay"]
            }

            self._hparams_config = hp.hparams_config(
                hparams=list(hparams.keys()),
                metrics=[hp.Metric(m) for m in metrics])
            # record hyperparameters
            base_dir, run_name = os.path.split(self.logdir)
            if len(run_name) == 0:
                base_dir, run_name = os.path.split(base_dir)
Example no. 18
    def evaluate(self):
        num_test_images = 10
        if self._test:
            preds = self._models["inpainter"].predict(self._test_masked_ims)
            preds = preds[:, :self.input_config["imshape"][0],
                          :self.input_config["imshape"][1], :]

            reconstruction_residual = self._test_mask * (preds -
                                                         self._test_ims)
            reconstructed_loss = np.mean(np.abs(reconstruction_residual))

            # see how the discriminator does on them
            disc_outputs_on_raw = self._models["discriminator"].predict(
                self._test_ims)
            disc_outputs_on_inpaint = self._models["discriminator"].predict(
                preds)
            # for the visualization in tensorboard: replace the unmasked areas
            # with the input image as a guide to the eye
            preds = preds * self._test_mask + self._test_ims * (
                1 - self._test_mask)
            predviz = np.concatenate([
                self._test_masked_ims[:num_test_images],
                preds[:num_test_images]
            ], 2).astype(np.float32)

            # record all the summaries
            tf.summary.image("inpaints",
                             predviz,
                             step=self.step,
                             max_outputs=10)
            tf.summary.histogram("disc_outputs_on_raw",
                                 disc_outputs_on_raw,
                                 step=self.step)
            tf.summary.histogram("disc_outputs_on_inpaint",
                                 disc_outputs_on_inpaint,
                                 step=self.step)
            self._record_scalars(test_recon_loss=reconstructed_loss)

        if self._downstream_labels is not None:
            # choose the hyperparameters to record
            if not hasattr(self, "_hparams_config"):
                from tensorboard.plugins.hparams import api as hp
                hparams = {
                    hp.HParam("adv_weight", hp.RealInterval(0., 10000.)):
                    self.config["adv_weight"],
                    hp.HParam("sobel", hp.Discrete([True, False])):
                    self.input_config["sobel"]
                }
            else:
                hparams = None
            self._linear_classification_test(hparams)
Example no. 19
def hparam_tuning(train_dataset: tf.data.Dataset, val_dataset: tf.data.Dataset,
                  epochs: int, log_dir: str):
    """Performs hyperparameter tuning.

    Args:
        train_dataset: A `tf.data` dataset. Should return a tuple
            of `(inputs, labels)`.
        val_dataset: A `tf.data` dataset on which to evaluate
            the loss and any metrics at the end of each epoch.
            Should return a tuple of `(inputs, labels)`.
        epochs: Number of epochs to train the model.
        log_dir: The directory where logs will be written.
    """

    HP_DROPOUT = hp.HParam('dropout', hp.Discrete([0.1, 0.2, 0.5]))
    HP_LEARNING_RATE = hp.HParam('learning_rate',
                                 hp.Discrete([1e-3, 1e-4, 1e-5]))
    HP_OPTIMIZER = hp.HParam('optimizer',
                             hp.Discrete(['sgd', 'rmsprop', 'adam']))

    METRIC_ACCURACY = 'accuracy'

    with tf.summary.create_file_writer(log_dir).as_default():
        hp.hparams_config(
            hparams=[HP_DROPOUT, HP_LEARNING_RATE, HP_OPTIMIZER],
            metrics=[hp.Metric(METRIC_ACCURACY, display_name='Accuracy')])

    trial_step = 0

    for dropout in HP_DROPOUT.domain.values:
        for learning_rate in HP_LEARNING_RATE.domain.values:
            for optimizer in HP_OPTIMIZER.domain.values:
                hparams = {
                    HP_DROPOUT: dropout,
                    HP_LEARNING_RATE: learning_rate,
                    HP_OPTIMIZER: optimizer,
                }

                trial_id = f'run-{trial_step}'
                trial_dir = os.path.join(log_dir, trial_id)
                logging.info(f'--- Starting trial: {trial_id}')
                logging.info({h.name: hparams[h] for h in hparams})

                accuracy = train_keras(train_dataset, val_dataset, dropout,
                                       learning_rate, optimizer, epochs,
                                       trial_dir, hparams)
                write_trial_log(METRIC_ACCURACY, accuracy, hparams, trial_dir)

                trial_step += 1
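write_trial_log is not shown in this snippet; a plausible sketch, assuming it pairs the trial's hyperparameters with the final metric so each run appears as one row in the HParams dashboard:

def write_trial_log(metric_name, metric_value, hparams, trial_dir):
    # Hypothetical implementation matching the call site above.
    with tf.summary.create_file_writer(trial_dir).as_default():
        hp.hparams(hparams)  # maps HParam objects to this trial's values
        tf.summary.scalar(metric_name, metric_value, step=1)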
Example no. 20
    def priors(
            self) -> (hp.HParam, hp.HParam, hp.HParam, hp.HParam, hp.HParam):
        """
        Initialises the set of values per hyperparameter type
        :return:
        """
        alpha_units = hp.HParam('num_units', hp.Discrete(self.alpha_units))
        alpha_dropout = hp.HParam('dropout', hp.Discrete(self.alpha_dropout))

        beta_units = hp.HParam('num_units', hp.Discrete(self.beta_units))
        beta_dropout = hp.HParam('dropout', hp.Discrete(self.beta_dropout))

        optimization = hp.HParam('optimizer', hp.Discrete(self.opt))

        return alpha_units, alpha_dropout, beta_units, beta_dropout, optimization
Example no. 21
 def test_convert_hyperparams_to_hparams_multi_float(self):
     hps = hp_module.HyperParameters()
     hps.Float("theta", min_value=0.0, max_value=1.57)
     hps.Float("r", min_value=0.0, max_value=1.0)
     hparams = utils.convert_hyperparams_to_hparams(hps)
     expected_hparams = {
         hparams_api.HParam("r", hparams_api.RealInterval(0.0, 1.0)): 0.0,
         hparams_api.HParam("theta",
                            hparams_api.RealInterval(0.0, 1.57)): 0.0,
     }
     hparams_repr_list = [repr(hparams[x]) for x in hparams.keys()]
     expected_hparams_repr_list = [
         repr(expected_hparams[x]) for x in expected_hparams.keys()
     ]
     self.assertCountEqual(hparams_repr_list, expected_hparams_repr_list)
Example no. 22
    def evaluate(self, avpool=True, query_fig=False):
        if self._test:
            test_recon_loss = 0
            for x in self._test_ds:
                reconstructed = self._models["full"](x)
                test_recon_loss += np.mean(
                    np.abs(x.numpy() - reconstructed.numpy()))

            self._record_scalars(test_reconstruction_loss=test_recon_loss /
                                 self._test_steps)

            test_ims = np.concatenate([x.numpy(), reconstructed.numpy()], 2)
            self._record_images(test_images=test_ims)

        if self._downstream_labels is not None:
            # choose the hyperparameters to record
            if not hasattr(self, "_hparams_config"):
                from tensorboard.plugins.hparams import api as hp
                hparams = {
                    hp.HParam("dropout", hp.RealInterval(0., 1.)):
                    self.config["dropout"]
                }
            else:
                hparams = None
            self._linear_classification_test(hparams,
                                             avpool=avpool,
                                             query_fig=query_fig)
Example no. 23
 def evaluate(self):
     if self._downstream_labels is not None:
         # choose the hyperparameters to record
         if not hasattr(self, "_hparams_config"):
             from tensorboard.plugins.hparams import api as hp
             hparams = {
                 hp.HParam("temperature", hp.RealInterval(0., 10000.)):
                 self.config["temperature"],
                 hp.HParam("num_hidden", hp.IntInterval(1, 1000000)):
                 self.config["num_hidden"],
                 hp.HParam("output_dim", hp.IntInterval(1, 1000000)):
                 self.config["output_dim"]
             }
         else:
             hparams = None
         self._linear_classification_test(hparams)
Example no. 24
 def prepare_hparams(self, hparams_domains):
     """Convert informal specifications to a dict of hp.HParam"""
     domains = {}
     domains["num_layers"] = hp.HParam(
         "num_layers",
         hp.Discrete(hparams_domains["num_layers"]))
     domains["learning_rate"] = hp.HParam(
         "learning_rate",
         hp.RealInterval(*hparams_domains["learning_rate"]))
     domains["num_filters"] = hp.HParam(
         "num_filters",
         hp.Discrete(hparams_domains["num_filters"]))
     domains["batch_normalization"] = hp.HParam(
         "batch_normalization",
         hp.Discrete(hparams_domains["batch_normalization"]))
     return domains
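A usage sketch, assuming the informal specification is a dict holding value lists for discrete domains and a (low, high) pair for the learning rate; the manager instance is illustrative:

domains = manager.prepare_hparams({
    "num_layers": [2, 4, 8],
    "learning_rate": (1e-4, 1e-2),
    "num_filters": [16, 32],
    "batch_normalization": [True, False],
})
# domains["learning_rate"].domain == hp.RealInterval(1e-4, 1e-2)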
Example no. 25
    def test_add_logging_user_specified(self, mock_hparams,
                                        mock_create_file_writer,
                                        mock_super_tuner):
        remote_tuner = self._remote_tuner(None, None, self._study_config)

        callbacks = [
            tf.keras.callbacks.TensorBoard(log_dir=remote_tuner.directory,
                                           write_images=True)
        ]

        remote_tuner._add_logging(callbacks, self._test_trial)

        expected_logdir = os.path.join(remote_tuner.directory,
                                       self._test_trial.trial_id, "logs")
        expected_hparams = {
            hparams_api.HParam("learning_rate",
                               hparams_api.Discrete([1e-4, 1e-3, 1e-2])):
            1e-4
        }

        self.assertLen(callbacks, 1)
        self.assertEqual(callbacks[0].log_dir, expected_logdir)
        self.assertEqual(callbacks[0].write_images, True)
        mock_create_file_writer.assert_called_once_with(expected_logdir)
        self.assertEqual(mock_hparams.call_count, 1)
        self.assertEqual(repr(mock_hparams.call_args[0][0]),
                         repr(expected_hparams))
Example no. 26
    def __init__(self,
                 model_manager: ModelManager,
                 optimizer_manager: OptimizerManager,
                 *,
                 logdir,
                 run_name_template="run",
                 log_names=True):
        """

        Args:
            model_manager ():
            optimizer_manager ():
            logdir ():
            run_name_template ():
        """
        self.model_manager = model_manager
        self.optimizer_manager = optimizer_manager

        self.hp_dict = dict(model_manager.hparam, **optimizer_manager.hparam)

        # Logging the run name makes it easier to match the Scalars page to
        # the HParams page. However, it can overcrowd the HParams page when
        # the same experiment is repeated, so it can optionally be turned off.
        self.log_names = log_names
        if log_names:
            self.hp_dict.update(
                {"run_name": hp.HParam('run_name', display_name="Run name")})

        self.run_name = None
        self.logdir = logdir
        self.run_name_template = run_name_template
        self.run_id = 0
        self.epoch = 0
Example no. 27
    def _add_distributions(
        self,
        distributions: Dict[str,
                            optuna.distributions.BaseDistribution]) -> None:
        real_distributions = (
            optuna.distributions.UniformDistribution,
            optuna.distributions.LogUniformDistribution,
            optuna.distributions.DiscreteUniformDistribution,
            optuna.distributions.FloatDistribution,
        )
        int_distributions = (
            optuna.distributions.IntUniformDistribution,
            optuna.distributions.IntLogUniformDistribution,
            optuna.distributions.IntDistribution,
        )
        categorical_distributions = (
            optuna.distributions.CategoricalDistribution, )
        supported_distributions = (real_distributions + int_distributions +
                                   categorical_distributions)

        for param_name, param_distribution in distributions.items():
            if isinstance(param_distribution, real_distributions):
                self._hp_params[param_name] = hp.HParam(
                    param_name,
                    hp.RealInterval(float(param_distribution.low),
                                    float(param_distribution.high)),
                )
            elif isinstance(param_distribution, int_distributions):
                self._hp_params[param_name] = hp.HParam(
                    param_name,
                    hp.IntInterval(param_distribution.low,
                                   param_distribution.high),
                )
            elif isinstance(param_distribution, categorical_distributions):
                self._hp_params[param_name] = hp.HParam(
                    param_name,
                    hp.Discrete(param_distribution.choices),
                )
            else:
                distribution_list = [
                    distribution.__name__
                    for distribution in supported_distributions
                ]
                raise NotImplementedError(
                    "The distribution {} is not implemented. "
                    "The parameter distribution should be one of the {}".
                    format(param_distribution, distribution_list))
Example no. 28
 def construct_hparams(self, params):
     hparams = []
     hparams_values = []
     for key, value in params.items():
         hparam = hp.HParam(key, hp.Discrete(value))
         hparams.append(hparam)
         hparams_values.append(value)
     return hparams, hparams_values
Example no. 29
 def test_convert_hyperparams_to_hparams_fixed(self, name, value):
     hps = hp_module.HyperParameters()
     hps.Fixed(name, value)
     hparams = utils.convert_hyperparams_to_hparams(hps)
     expected_hparams = {
         hparams_api.HParam(name, hparams_api.Discrete([value])): value,
     }
     self.assertEqual(repr(hparams), repr(expected_hparams))
Example no. 30
 def test_convert_hyperparams_to_hparams_fixed_bool(self):
     hps = hp_module.HyperParameters()
     hps.Fixed("condition", True)
     hparams = utils.convert_hyperparams_to_hparams(hps)
     expected_hparams = {
         hparams_api.HParam("condition", hparams_api.Discrete([True])): True,
     }
     self.assertEqual(repr(hparams), repr(expected_hparams))