Example #1
    def from_json(cls, obj):
        """Recreate a SearchSpace from its JSON representation.

        Parameters
        ----------
        obj : dict
            JSON encoding of the SearchSpace

        Returns
        -------
        searchspace : `pyrameter.searchspace.SearchSpace`
        """
        domains = [Domain.from_json(d) for d in obj['domains']]
        searchspace = cls(domains, exp_key=obj['exp_key'])
        trials = []
        for t in obj['trials']:
            trial = Trial(searchspace,
                          hyperparameters=t['hyperparameters'],
                          results=t['results'],
                          objective=t['objective'],
                          errmsg=t['errmsg'])
            trial.dirty = False
            trials.append(trial)
        searchspace.trials = trials
        searchspace._complexity = obj['complexity']
        searchspace._uncertainty = obj['uncertainty']
        return searchspace
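
For context, the sketch below exercises this classmethod on a hand-built dict; the import path and the minimal set of keys (domains, exp_key, trials, complexity, uncertainty) are inferred from the code above rather than taken from pyrameter's documentation.

# Hypothetical round trip through SearchSpace.from_json; import path assumed from the docstring.
from pyrameter.searchspace import SearchSpace

obj = {
    'exp_key': 'demo',
    'domains': [],       # each entry would be a Domain JSON encoding
    'trials': [],        # each entry carries hyperparameters/results/objective/errmsg
    'complexity': 1.0,
    'uncertainty': 0.0,
}

searchspace = SearchSpace.from_json(obj)   # an empty SearchSpace tagged 'demo'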
Example #2
    def __call__(self, method=None, to_dict=False):
        """Generate a new trial for this search space.

        Parameters
        ----------
        to_dict : bool
            Convert the hyperparameter values to a nested dictionary on return.

        Returns
        -------
        trial : ``pyrameter.trial.Trial`` or dict
            Trial data, including hyperparameter values and metadata for a
            database. If ``to_dict`` is ``True``, instead return only the
            nested dictionary of hyperparameter values matching the structure
            of the original specification.
        """
        self.ready = self.population is None or all(
            [t.objective is not None for t in self.population])

        if self.ready:
            if method is None:
                method = RandomSearch()
            population = method(self)
            if not isinstance(population, list) or isinstance(
                    population, np.ndarray) and population.ndim == 1:
                population = [population]
            self.population = [
                Trial(self, hyperparameters=h) for h in population
            ]
            self.trials.extend(self.population)
            self.generations += 1
            return [t.parameter_dict
                    for t in self.population] if to_dict else self.population
        else:
            return []
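
A short driver sketch for this population-based variant is below; the ConstantDomain constructor is borrowed from the tests later on this page, and the exact pyrameter import paths are assumptions.

# Hypothetical usage of the batched __call__ above.
# Assumes SearchSpace and ConstantDomain imported from pyrameter (exact paths may vary).
space = SearchSpace([ConstantDomain('A', 8)])

batch = space()                    # a list of Trial objects (or [] if not ready)
for trial in batch:
    trial.objective = 0.5          # reporting every objective readies the space again

next_batch = space(to_dict=True)   # nested hyperparameter dicts instead of Trials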
Example #3
    def __call__(self, method=None, to_dict=False):
        """Generate a new trial for this search space if ready.

        Parameters
        ----------
        to_dict : bool
            Convert the hyperparameter values to a nested dictionary on return.

        Returns
        -------
        trial : ``pyrameter.trial.Trial`` or dict or None
            Trial data, including hyperparameter values and metadata for a
            database. If ``to_dict`` is ``True``, instead return only the
            nested dictionary of hyperparameter values matching the structure
            of the original specification. If ``None`` is returned, then the
            space requires more in-progress trials to return before
            more hyperparameters may be generated. This last case is to
            ensure proper warm-up sampling for model-based methods.
        """
        if method is None:
            method = RandomSearch()

        hyperparameters = method(self)
        for i, d in enumerate(self.domains):
            d.current = hyperparameters[i]
        trial = Trial(self, hyperparameters=hyperparameters)
        self.trials.append(trial)
        return trial.parameter_dict if to_dict else trial
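
This variant also records the sampled value on each Domain via d.current; a minimal usage sketch follows, again with assumed imports and the path-style domain names used in the tests below.

# Hypothetical single-trial usage of the variant above.
# Assumes SearchSpace and ConstantDomain imported from pyrameter (exact paths may vary).
space = SearchSpace([ConstantDomain('/A', 8), ConstantDomain('/B/a/b', 2)])

trial = space()               # a Trial; each Domain.current now holds its sampled value
params = space(to_dict=True)  # nested dict, e.g. {'A': 8, 'B': {'a': {'b': 2}}}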
Example #4
    def from_json(cls, obj):
        """Recreate a SearchSpace from its JSON representation.

        Parameters
        ----------
        obj : dict
            JSON encoding of the SearchSpace

        Returns
        -------
        searchspace : `pyrameter.searchspace.SearchSpace`
        """
        domains = [Domain.from_json(d) for d in obj['domains']]
        searchspace = cls(domains, exp_key=obj['exp_key'])
        trials = []
        for t in obj['trials']:
            t['searchspace'] = searchspace
            trial = Trial.from_json(t)
            trial.dirty = False
            trials.append(trial)
        searchspace.trials = trials
        searchspace._complexity = obj['complexity']
        searchspace._uncertainty = obj['uncertainty']
        searchspace.id = obj['id']
        return searchspace
Example #5
def test_parameter_dict():
    s = SearchSpace([ConstantDomain('A', 8)])
    t = Trial(s, hyperparameters=[8])
    assert t.parameter_dict == {'A': 8}

    s = SearchSpace([ConstantDomain('/A', 8), ConstantDomain('/B/a/b', 2)])
    t = Trial(s, hyperparameters=[8, 2])
    assert t.parameter_dict == {'A': 8, 'B': {'a': {'b': 2}}}

    s = SearchSpace([
        ConstantDomain('/A', 8),
        ConstantDomain('/B/a/b', 2),
        ConstantDomain('/B/a/c', 4)
    ])
    t = Trial(s, hyperparameters=[8, 2, 4])
    assert t.parameter_dict == {'A': 8, 'B': {'a': {'b': 2, 'c': 4}}}
Example #6
    def __call__(self, method=None, to_dict=False):
        try:
            hyperparameters = np.array(list(next(self._iterator)))
            trial = Trial(self, hyperparameters=hyperparameters)
            self.trials.append(trial)
            return trial.parameter_dict if to_dict else trial
        except StopIteration:
            self.done = True
            return None
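
This iterator-driven variant suggests an exhaustive, grid-style search space that is consumed one combination at a time. The sketch below shows one way such an _iterator could be seeded with itertools.product; the grid construction is an illustration, not pyrameter's documented behavior.

import itertools

# Hypothetical construction of the _iterator consumed by the __call__ above:
# one tuple per combination of discrete per-domain values.
grids = [[1, 2, 3], ['relu', 'tanh']]        # stand-ins for per-domain value lists
_iterator = iter(itertools.product(*grids))

next(_iterator)   # (1, 'relu')
next(_iterator)   # (1, 'tanh'), and so on until StopIteration marks the space done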
Example #7
    def __call__(self, method=None, to_dict=False):
        """Generate a new trial for this search space.

        Parameters
        ----------
        to_dict : bool
            Convert the hyperparameter values to a nested dictionary on return.

        Returns
        -------
        trial : ``pyrameter.trial.Trial`` or dict
            Trial data, including hyperparameter values and metadata for a
            database. If ``to_dict`` is ``True``, instead return only the
            nested dictionary of hyperparameter values matching the structure
            of the original specification.
        """
        if method is None:
            method = random_search
        hyperparameters = method(self)
        trial = Trial(self, hyperparameters=hyperparameters)
        self.trials.append(trial)
        return trial.parameter_dict if to_dict else trial
Example #8
def tpe(space, best_split=0.2, n_samples=10, warm_up=10, **gmm_kws):
    """Tree-structured Parzen Enstimators for generating hyperparameters.

    Parameters
    ----------
    space : pyrameter.domains.SearchSpace
        The space to generate values from.
    best_split : float in [0, 1]
        The percentage of results to use for the top-k mixture model.
    n_samples : int
        The number of candidate samples to generate.
    warm_up : int
        The number of random search iterations to use to seed TPE.

    Other Parameters
    ----------------
    **gmm_kws
        Additional keyword arguments to parameterize the Gaussian Mixture
        Models.

    Returns
    -------
    values : array-like
        The array of hyperparameter values with the highest expected
        improvement from among the candidate ``n_samples``.
    """
    # Warm up with random search and inject new random search
    # hyperparameters at an interval. This attempts to prevent TPE from
    # converging too quickly.
    if len(space.objective) < warm_up or len(space.objective) % warm_up == 0:
        params = random_search(space)
    else:
        params = []

        # Collect all of the evaluated hyperparameter values and their
        # associated objective function value into a feature vector.
        features = space.to_array().T
        losses = np.array(space.objective)

        # Sort the hyperparameters by their performance and split into
        # the "best" and "rest" performers.
        idx = np.argsort(losses, axis=0)
        split = int(np.ceil(idx.shape[0] * best_split))
        losses = np.reshape(losses, (-1, 1))

        # Model the objective function based on each feature.
        for j in range(features.shape[0]):
            l = GaussianMixture(**gmm_kws)
            g = GaussianMixture(**gmm_kws)
            l.fit(np.reshape(features[j, idx[:split]], (-1, 1)),
                  losses[idx[:split]])
            g.fit(np.reshape(features[j, idx[split:]], (-1, 1)),
                  losses[idx[split:]])

            # Sample hyperparameter values from the "best" model and score
            # the samples with each model.
            samples, _ = l.sample(n_samples=n_samples)
            score_l = l.score_samples(samples)
            score_g = g.score_samples(samples)

            # Compute the expected improvement; i.e. maximize the l score
            # while minimizing the g score. Higher values are better.
            ei = score_l / score_g
            best = samples[np.argmax(np.squeeze(ei).ravel())]

            # Add the value with the best expected improvement
            domain = space.nodes[j]
            params.append(domain.map_to_domain(best[0], bound=True))

        params = Trial(space, hyperparameters=params)
        space.results.append(params)
    return params
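
The core of this method is the expected-improvement score: candidates drawn from the "best" mixture model are ranked by how much more likely they are under that model than under the "rest" model. The standalone sketch below reproduces that scoring step for a single hyperparameter dimension with scikit-learn on synthetic data; it uses the log-likelihood difference as the ranking criterion and is an illustration, not part of pyrameter.

import numpy as np
from sklearn.mixture import GaussianMixture

# Standalone sketch of the TPE scoring step on one hyperparameter dimension.
rng = np.random.default_rng(42)
values = rng.uniform(0.0, 1.0, size=(30, 1))   # previously evaluated values
losses = np.squeeze(values) ** 2               # pretend smaller values did better

idx = np.argsort(losses)
split = int(np.ceil(idx.shape[0] * 0.2))

l = GaussianMixture().fit(values[idx[:split]])   # model of the best performers
g = GaussianMixture().fit(values[idx[split:]])   # model of the rest

candidates, _ = l.sample(n_samples=10)
ei = l.score_samples(candidates) - g.score_samples(candidates)  # log density ratio
best = candidates[np.argmax(ei)]   # candidate with the highest expected improvement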
Example #9
def test_to_json():
    s = SearchSpace([ConstantDomain('A', 8)])
    t = Trial(s)
    assert t.to_json() == {
        'searchspace': s.id,
        'status': TrialStatus.INIT.value,
        'hyperparameters': None,
        'results': None,
        'objective': None,
        'errmsg': None
    }

    s = SearchSpace([ConstantDomain('A', 8)])
    t = Trial(s, hyperparameters=[8])
    assert t.to_json() == {
        'searchspace': s.id,
        'status': TrialStatus.READY.value,
        'hyperparameters': [8],
        'results': None,
        'objective': None,
        'errmsg': None
    }

    s = SearchSpace([ConstantDomain('A', 8)])
    t = Trial(s, hyperparameters=[8], objective=0.374)
    assert t.to_json() == {
        'searchspace': s.id,
        'status': TrialStatus.READY.value,
        'hyperparameters': [8],
        'results': None,
        'objective': 0.374,
        'errmsg': None
    }

    s = SearchSpace([ConstantDomain('A', 8)])
    t = Trial(s, hyperparameters=[8], objective=0.374, errmsg='hi')
    assert t.to_json() == {
        'searchspace': s.id,
        'status': TrialStatus.ERROR.value,
        'hyperparameters': [8],
        'results': None,
        'objective': 0.374,
        'errmsg': 'hi'
    }
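
For reference, dicts like these are what Example #4 feeds back through Trial.from_json after swapping the stored searchspace id for the live object; a minimal round-trip sketch under that assumption:

# Hypothetical round trip based on Example #4's use of Trial.from_json.
# Assumes SearchSpace, ConstantDomain, Trial imported from pyrameter as in these tests.
s = SearchSpace([ConstantDomain('A', 8)])
t = Trial(s, hyperparameters=[8], objective=0.374)

obj = t.to_json()
obj['searchspace'] = s            # swap the stored id for the live SearchSpace
restored = Trial.from_json(obj)   # expected to mirror t's hyperparameters and objective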
Example #10
def test_init():
    s = SearchSpace([ConstantDomain('A', 8)])
    t = Trial(s)
    assert t.searchspace() is s
    assert t.hyperparameters is None
    assert t.results is None
    assert t.objective is None
    assert t.errmsg is None
    assert t.dirty
    assert t.status == TrialStatus.INIT

    s = SearchSpace([ConstantDomain('A', 8)])
    t = Trial(s, hyperparameters=[8])
    assert t.searchspace() is s
    assert t.hyperparameters == [8]
    assert t.results is None
    assert t.objective is None
    assert t.errmsg is None
    assert t.dirty
    assert t.status == TrialStatus.READY

    s = SearchSpace([ConstantDomain('A', 8)])
    t = Trial(s, hyperparameters=[8], results={'loss': 10}, objective=10)
    assert t.searchspace() is s
    assert t.hyperparameters == [8]
    assert t.results == {'loss': 10}
    assert t.objective == 10
    assert t.errmsg is None
    assert t.dirty
    assert t.status == TrialStatus.DONE

    s = SearchSpace([ConstantDomain('A', 8)])
    t = Trial(s,
              hyperparameters=[8],
              results={'loss': 10},
              objective=10,
              errmsg='HI!')
    assert t.searchspace() is s
    assert t.hyperparameters == [8]
    assert t.results == {'loss': 10}
    assert t.objective == 10
    assert t.errmsg == 'HI!'
    assert t.dirty
    assert t.status == TrialStatus.ERROR