Example 1
    def summary(self, extended=False):
        """Display a summary of the training parameters and hyper-parameters."""
        subsection('Training parameters')
        settings = {"idx": self.idx, "model size": self.model_size}
        if extended:
            settings.update({
                "training size": self.training_size,
                "validation size": self.validation_size,
                "batch size": self.batch_size
            })
        display_settings(settings)

        subsection("Hyper-parameters")
        # Group hyper-parameters by their 'group' field.
        data_by_group = defaultdict(dict)
        for data in self.hyper_parameters.values():
            data_by_group[data['group']][data['name']] = data['value']

        # Generate the table.
        rows = [['Group', 'Hyper-parameter', 'Value']]
        idx = 0
        for grp in sorted(data_by_group.keys()):
            for param, value in data_by_group[grp].items():
                row = [grp, param, value]
                if idx % 2:
                    row = colorize_row(row, 'cyan')
                rows.append(row)
                idx += 1
        display_table(rows)
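
The method above assumes each entry of self.hyper_parameters is a dict carrying 'group', 'name', and 'value' fields. A minimal, self-contained sketch of the same group-then-tabulate pattern, using made-up sample data and plain print instead of the library's display helpers:

from collections import defaultdict

# Made-up records mirroring the shape summary() expects: every entry
# carries a 'group', a 'name', and a 'value'.
hyper_parameters = {
    "lr": {"group": "optimizer", "name": "learning_rate", "value": 0.001},
    "units": {"group": "dense", "name": "units", "value": 64},
    "act": {"group": "dense", "name": "activation", "value": "relu"},
}

# Group parameter name/value pairs by their 'group' field.
data_by_group = defaultdict(dict)
for data in hyper_parameters.values():
    data_by_group[data["group"]][data["name"]] = data["value"]

# Flatten the groups into table rows, sorted by group name.
rows = [["Group", "Hyper-parameter", "Value"]]
for grp in sorted(data_by_group):
    for param, value in data_by_group[grp].items():
        rows.append([grp, param, value])

# Plain-text stand-in for display_table().
for row in rows:
    print("{:<12} {:<16} {}".format(*row))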
Example 2
    def summary(self, extended=False):
        """Display a summary of the tuner state

        Args:
            extended (bool, optional): Display an extended summary.
            Defaults to False.
        """
        if self.debug:
            extended = True
        subsection('Tuning parameters')
        summary = {'tuner': self.name}

        if not extended:
            for attr in self.to_report:
                summary[attr] = getattr(self, attr)
        else:
            for attr in self.user_parameters:
                if attr in ['user_info']:
                    continue
                summary[attr] = getattr(self, attr)
            summary['log file'] = self.log_file
        display_settings(summary)

        if self.user_info and extended:
            subsection('User info')
            display_settings(self.user_info)

        self.host.summary(extended=extended)
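
The extended/compact switch above amounts to choosing which attribute names get reported. A self-contained sketch of that selection logic, where the class, attribute lists, and values are stand-ins for illustration rather than the library's own:

class DemoTuner:
    # Stand-in attribute lists mirroring to_report / user_parameters above.
    to_report = ['objective', 'epoch_budget']
    user_parameters = ['objective', 'epoch_budget', 'project', 'user_info']

    def __init__(self):
        self.name = 'demo'
        self.objective = 'val_accuracy'
        self.epoch_budget = 100
        self.project = 'default'
        self.user_info = {}
        self.log_file = 'results/log.json'

    def summary_dict(self, extended=False):
        # Compact mode reports only to_report; extended mode reports every
        # user parameter except user_info, plus the log file.
        summary = {'tuner': self.name}
        attrs = self.user_parameters if extended else self.to_report
        for attr in attrs:
            if attr == 'user_info':
                continue
            summary[attr] = getattr(self, attr)
        if extended:
            summary['log file'] = self.log_file
        return summary

print(DemoTuner().summary_dict())
print(DemoTuner().summary_dict(extended=True))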
Example 3
    def summary(self, extended=False):
        "display cloud service status summary"
        human_time = datetime.utcfromtimestamp(self.last_update)
        human_time = human_time.strftime('%Y-%m-%dT%H:%M:%SZ')

        section('Cloud service status')
        info = {"status": self.status, "last update": human_time}
        display_settings(info)
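
The timestamp handling is plain standard-library code; note that datetime.utcfromtimestamp is deprecated on recent Python versions, so an equivalent timezone-aware sketch (the sample value is arbitrary) would be:

from datetime import datetime, timezone

last_update = 0  # arbitrary UNIX timestamp, for illustration only
human_time = datetime.fromtimestamp(last_update, tz=timezone.utc)
print(human_time.strftime('%Y-%m-%dT%H:%M:%SZ'))  # 1970-01-01T00:00:00Z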
Example 4
    def summary(self, extended=False):
        subsection('Directories')
        settings = {
            "results": self.results_dir,
            "tmp": self.tmp_dir,
            "export": self.export_dir
        }
        display_settings(settings)
        if extended:
            config._Host.summary(extended=extended)
Example 5
    def summary(self, extended=False):
        "display statistics summary"
        subsection("Tuning stats")
        display_settings(self.to_config())
Example 6
    def __init__(self, model_fn, objective, **kwargs):
        """ RandomSearch hypertuner
        Args:
            model_fn (function): Function that returns the Keras model to be
            hypertuned. It is expected to return a different model at every
            invocation by drawing from distribution.* hyper-parameter ranges.

            objective (str): Name of the metric to optimize for. The referenced
            metric must be part of the `compile()` metrics.

        Attributes:
            epoch_budget (int, optional): how many epochs to hypertune for.
            Defaults to 100.

            max_budget (int, optional): how many epochs to spend at most on
            a given model. Defaults to 10.

            min_budget (int, optional): how many epochs to spend at least on
            a given model. Defaults to 3.

            num_executions (int, optional): Number of executions per model.
            Defaults to 1.

            project (str, optional): Project the tuning belongs to.
            Defaults to 'default'.

            architecture (str, optional): Name of the architecture tuned.
            Defaults to 'default'.

            user_info (dict, optional): User-supplied information that will be
            recorded alongside training data. Defaults to {}.

            label_names (list, optional): Label names for confusion matrix.
            Defaults to None, in which case the numerical labels are used.

            max_model_parameters (int, optional): Maximum number of parameters
            allowed for a model. Prevents OOM issues. Defaults to 2500000.

            checkpoint (bool, optional): Checkpoint models. Setting it to False
            disables checkpointing. Defaults to True.

            dry_run (bool, optional): Run the tuner without training models.
            Defaults to False.

            debug (bool, optional): Display debug information if True.
            Defaults to False.

            display_model (bool, optional): Display model summary if True.
            Defaults to False.

            results_dir (str, optional): Tuning results dir.
            Defaults to results/. Can specify a gs:// path.

            tmp_dir (str, optional): Temporary dir. Wiped at tuning start.
            Defaults to tmp/. Can specify a gs:// path.

            export_dir (str, optional): Export model dir. Defaults to export/.
            Can specify a gs:// path.

        FIXME:
         - Deal with early stopping correctly
         - Allow different halving ratios for epochs and models
         - Allow different types of distributions

        """

        super(UltraBand, self).__init__(model_fn, objective, "UltraBand",
                                        RandomDistributions, **kwargs)

        self.config = UltraBandConfig(kwargs.get('ratio', 3),
                                      self.state.min_epochs,
                                      self.state.max_epochs,
                                      self.state.epoch_budget)

        self.epoch_budget_expensed = 0

        settings = {
            "Epoch Budget": self.state.epoch_budget,
            "Num Models Sequence": self.config.model_sequence,
            "Num Epochs Sequence": self.config.epoch_sequence,
            "Num Brackets": self.config.num_brackets,
            "Number of Iterations": self.config.num_batches,
            "Total Cost per Band": self.config.total_epochs_per_band
        }

        section('UltraBand Tuning')
        subsection('Settings')
        display_settings(settings)
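
Based solely on the arguments documented above, a hypothetical instantiation could look like the following; the objective name, the budget values, and the body of model_fn are illustrative assumptions, not taken from the source:

# Hypothetical usage, assuming UltraBand is importable from the tuner
# package (import path not shown in the source).
def model_fn():
    # Expected to return a freshly built, compiled Keras model whose
    # hyper-parameters are drawn from distribution.* ranges.
    ...

# Constructing the tuner already prints the 'UltraBand Tuning' settings
# shown at the end of __init__ above.
tuner = UltraBand(
    model_fn,
    objective='val_accuracy',  # assumed metric; must be in compile() metrics
    epoch_budget=100,          # total epochs to spend on tuning
    max_budget=10,             # most epochs to spend on a single model
    min_budget=3,              # fewest epochs to spend on a single model
    num_executions=1,          # executions per model
    project='default',
)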