Example #1
0
 def _fit(self, opt_params=None, stops=None, mini_batch=30, parallelism=2):
     """
     Train ``self.model`` on ``self.train`` / ``self.valid`` and return the
     hits score obtained on the validation set.

     :param opt_params: OptimizerParameters or None for the Adadelta default.
     :param stops: list of stop criteria or None for the defaults below.
     :param mini_batch: int, mini-batch size.
     :param parallelism: int, level of parallelism passed through to the model.
     """
     # Default optimizer: Adadelta with its customary hyper-parameters.
     if opt_params is None:
         opt_params = OptimizerParameters(
             algorithm='Adadelta',
             options={'step-rate': 1.0, 'decay': 0.995,
                      'momentum': 0.3, 'offset': 1e-8})
     # Default stop criteria: iteration cap plus a tolerance on 'hits'.
     if stops is None:
         stops = [criterion['MaxIterations'](30),
                  criterion['AchieveTolerance'](0.95, key='hits')]
     logger.info("Entrenando modelo...")
     return self.model.fit(self.train, self.valid,
                           valid_iters=5,
                           mini_batch=mini_batch,
                           parallelism=parallelism,
                           stops=stops,
                           optimizer_params=opt_params,
                           keep_best=True,
                           reproducible=True)
Example #2
0
    def __init__(self, type_model=None, net_params=None, opt_params=None, n_layers=0, n_iter=10, seed=123):
        """
        Set up the Iris data splits, default optimization parameters, the model
        type with its network-parameter domain, and the random search itself.
        """
        # Load Iris and collect every split into local memory.
        logger.info("Cargando datos...")
        iris = load_iris()
        splits = LocalLabeledDataSet(iris).split_data([.5, .3, .2])
        self.train, self.valid, self.test = [part.collect() for part in splits]

        # Optimization configuration; Adadelta is used by default.
        if opt_params is None:
            opt_params = OptimizerParameters(
                algorithm='Adadelta',
                stops=[criterion['MaxIterations'](10),
                       criterion['AchieveTolerance'](0.95, key='hits')],
                options={'step-rate': 1.0, 'decay': 0.995, 'momentum': 0.3, 'offset': 1e-8},
                merge_criter='w_avg')
        self.opt_params = opt_params

        # Model type and the network-parameter domain to search over.
        # NOTE(review): the boolean flags presumably mark which parameters the
        # search may vary — confirm against NetworkParameters' contract.
        self.type_model = NeuralNetwork if type_model is None else type_model
        if net_params is None:
            net_params = NetworkParameters(units_layers=[4, 10, 3], activation=False, dropout_ratios=True,
                                           classification=True, strength_l1=True, strength_l2=True, seed=seed)
        self.net_params = net_params

        # Random search over the declared domain.
        self.rnd_search = RandomSearch(self.net_params, n_layers, n_iter, net_domain=None, seed=seed)
Example #3
0
 def __init__(self, opt_params=None):
     """Test case exercising plain gradient descent with standard momentum."""
     logger.info("Test de GD Standard iniciando...")
     if opt_params is None:
         # Default: GD with standard momentum, capped at 10 iterations or
         # 0.95 hits, merging worker models by weighted average.
         opt_params = OptimizerParameters(
             algorithm='GD',
             stops=[criterion['MaxIterations'](10),
                    criterion['AchieveTolerance'](0.95, key='hits')],
             options={'step-rate': 0.01, 'momentum': 0.8, 'momentum_type': 'standard'},
             merge_criter='w_avg')
     super(TestGDStandard, self).__init__(opt_params)
Example #4
0
 def __init__(self, opt_params=None):
     """Test case exercising the Adadelta optimizer."""
     logger.info("Test de Adadelta iniciando...")
     if opt_params is None:
         # Default: Adadelta, capped at 10 iterations or 0.95 hits,
         # merging worker models by weighted average.
         opt_params = OptimizerParameters(
             algorithm='Adadelta',
             stops=[criterion['MaxIterations'](10),
                    criterion['AchieveTolerance'](0.95, key='hits')],
             options={'step-rate': 1.0, 'decay': 0.99, 'momentum': 0.3, 'offset': 1e-8},
             merge_criter='w_avg')
     super(TestAdadelta, self).__init__(opt_params)
Example #5
0
    def fit(self, type_model, train, valid, test, mini_batch=100, parallelism=4, valid_iters=5, measure=None,
            stops=None, optimizer_params=None, reproducible=False, keep_best=True):
        """
        Run the random hyper-parameter search over the configured domain,
        fitting one model per sampled configuration via
        :func:`~learninspy.core.model.NeuralNetwork.fit` and keeping the model
        that scores best on *test*.

        :param type_model: class, a model type from :mod:`~learninspy.core.model`.

        .. note:: The remaining parameters mirror those of
            :func:`~learninspy.core.model.NeuralNetwork.fit`, plus the *test*
            set used to rank each trained model. See that function's API for
            details on each parameter.

        :return: tuple of (best model, best hits-on-test score).
        """
        # Fall back to default training stop criteria when none are given.
        if stops is None:
            stops = [criterion['MaxIterations'](10),
                     criterion['AchieveTolerance'](0.95, key='hits')]
        # Default optimizer: Adadelta with slightly looser local stops.
        if optimizer_params is None:
            local_stops = [criterion['MaxIterations'](10),
                           criterion['AchieveTolerance'](0.90, key='hits')]
            optimizer_params = OptimizerParameters(algorithm='Adadelta', stops=local_stops, merge_criter='w_avg')

        # Track the best model seen so far across the search iterations.
        best_model, best_hits = None, 0.0

        logger.info("Optimizacion utilizada: %s", str(optimizer_params))
        for it in xrange(self.n_iter):
            sampled_params = self._take_sample(seed=self.seeds[it])
            logger.info("Iteracion %i en busqueda.", it+1)
            logger.info("Configuracion usada: %s", os.linesep+str(sampled_params))
            candidate = type_model(sampled_params)
            candidate.fit(train, valid, mini_batch=mini_batch, parallelism=parallelism,
                          valid_iters=valid_iters, measure=measure, reproducible=reproducible,
                          stops=stops, optimizer_params=optimizer_params, keep_best=keep_best)
            hits_test = candidate.evaluate(test, predictions=False)
            # '>=' keeps the most recent model on ties, as before.
            if hits_test >= best_hits:
                best_hits = hits_test
                best_model = candidate
        logger.info("Configuracion del mejor modelo: %s", os.linesep+str(best_model.params))
        logger.info("Hits en test: %12.11f", best_hits)
        return best_model, best_hits
Example #6
0
    def test_parallelism(self, mini_batch=10):
        """
        Fit the model under several parallelism levels and assert that both
        validation and test scores stay above 0.7 for each level.
        """
        logger.info("Testeando variantes del nivel de paralelismo...")

        # Iris data; only the validation split is collected locally here.
        logger.info("Datos utilizados: Iris")
        dataset = LocalLabeledDataSet(load_iris())
        self.train, self.valid, self.test = dataset.split_data([.5, .3, .2])
        self.valid = self.valid.collect()

        # Adadelta optimizer, capped at 10 iterations.
        opt_params = OptimizerParameters(
            algorithm='Adadelta',
            options={'step-rate': 1.0, 'decay': 0.995,
                     'momentum': 0.3, 'offset': 1e-8})
        stops = [criterion['MaxIterations'](10)]

        # -1, 0 and 2 cover the supported parallelism variants.
        for level in [-1, 0, 2]:
            logger.info("Seteando paralelismo en %i", level)
            hits_valid = self._fit(opt_params=opt_params,
                                   stops=stops,
                                   mini_batch=mini_batch,
                                   parallelism=level)
            logger.info("Asegurando salidas correctas...")
            assert hits_valid > 0.7

            hits_test, pred_test = self.model.evaluate(self.test,
                                                       predictions=True,
                                                       measure='R2')
            assert hits_test > 0.7

            logger.info("OK")
        return
Example #7
0
    def __init__(self, opt_params=None):
        """
        Optimizer test fixture over the Iris dataset: loads and splits the
        data, configures the optimizer (Adadelta by default) and builds a
        small classification network.
        """
        logger.info("Testeo de Optimizer con datos de Iris")
        # Load Iris, split it, and collect every split locally.
        logger.info("Cargando datos...")
        splits = LocalLabeledDataSet(load_iris()).split_data([.5, .3, .2])
        self.train, self.valid, self.test = [part.collect() for part in splits]

        # Adadelta is the default optimizer configuration.
        if opt_params is None:
            opt_params = OptimizerParameters(
                algorithm='Adadelta',
                stops=[criterion['MaxIterations'](10),
                       criterion['AchieveTolerance'](0.95, key='hits')],
                options={'step-rate': 1.0, 'decay': 0.99, 'momentum': 0.3, 'offset': 1e-8},
                merge_criter='w_avg')
        self.opt_params = opt_params

        # Small ReLU network for the 4-feature / 3-class Iris task.
        self.model = NeuralNetwork(
            NetworkParameters(units_layers=[4, 10, 3], activation='ReLU',
                              strength_l1=1e-5, strength_l2=3e-4,
                              dropout_ratios=[0.2, 0.0], classification=True))
Example #8
0
# --- 2.b) Optimization parameters

# Stop criteria for each local (per-worker) optimization run.
local_stops = [
    criterion['MaxIterations'](30), criterion['AchieveTolerance'](0.95,
                                                                  key='hits')
]

# Stop criteria for the global training loop.
# NOTE(review): global_stops is presumably consumed by the fit call that
# continues past this excerpt — confirm against the full script.
global_stops = [
    criterion['MaxIterations'](20), criterion['AchieveTolerance'](0.95,
                                                                  key='hits')
]

# Adadelta hyper-parameters.
options = {'step-rate': 1.0, 'decay': 0.995, 'momentum': 0.7, 'offset': 1e-8}

# Local optimization uses Adadelta with the stops above; worker models are
# merged by weighted average ('w_avg') targeting the cost ('cost').
optimizer_params = OptimizerParameters(algorithm='Adadelta',
                                       stops=local_stops,
                                       options=options,
                                       merge_criter='w_avg',
                                       merge_goal='cost')

logger.info("Optimizacion utilizada: %s", os.linesep + str(optimizer_params))
logger.info("Configuracion usada: %s", os.linesep + str(net_params))

# -- 3) Build and fit the neural network

# net_params is defined earlier in the script, outside this excerpt.
neural_net = NeuralNetwork(net_params)

logger.info("Entrenando red neuronal ...")
hits_valid = neural_net.fit(train,
                            valid,
                            valid_iters=1,
                            mini_batch=20,