Example #1
#!/usr/bin/env python
# -*- coding: utf-8 -*-

__author__ = 'leferrad'

from learninspy.utils.data import *
from learninspy.utils.fileio import get_logger
from learninspy.context import sc

import os
import shutil

logger = get_logger(name=__name__)

TEMP_PATH = "/tmp/"


class TestLabeledDataset(object):
    def __init__(self):
        logger.info("Testeo de instances de LabeledDataset con datos de MNIST")
        # Datos
        logger.info("Cargando datos en memoria...")
        train, valid, test = load_mnist()
        self.data = train[:50]

    def test_load_data(self):
        # DistributedLabeledDataSet
        logger.info("Testeando carga de datos en DistributedLabeledDataset...")
        distributed_dataset = DistributedLabeledDataSet(self.data)
        d_features = distributed_dataset.features
        d_labels = distributed_dataset.labels
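
The excerpt ends here. A minimal continuation sketch, not part of the original file, assuming the distributed features/labels fields are Spark RDDs (so that collect() is available) and that the test simply compares sizes against the local sample:

        # Hedged continuation sketch: collect() assumes the fields are RDD-backed.
        assert len(d_features.collect()) == len(self.data)
        assert len(d_labels.collect()) == len(self.data)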
Example #2
"""Ejemplos de uso para entrenar redes neuronales con Learninspy
utilizando datos de Combined Cycle Power Plant (regresión)."""

__author__ = 'leferrad'

from learninspy.core.model import NeuralNetwork, NetworkParameters
from learninspy.core.optimization import OptimizerParameters
from learninspy.core.stops import criterion
from learninspy.utils.data import LocalLabeledDataSet, load_ccpp
from learninspy.utils.evaluation import RegressionMetrics
from learninspy.utils.plots import plot_fitting
from learninspy.utils.fileio import get_logger

import os

logger = get_logger(name='learninspy-demo_ccpp')

# -- 1.a) Data loading

logger.info("Cargando datos de Combined Cycle Power Plant ...")
dataset = load_ccpp()
dataset = LocalLabeledDataSet(dataset)
rows, cols = dataset.shape
logger.info("Dimension de datos: %i x %i", rows, cols)

train, valid, test = dataset.split_data([0.5, 0.3, 0.2])  # partition into train/valid/test sets

# -- 1.b) Normalization
"""
std = StandardScaler()
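
The excerpt cuts off inside the commented-out normalization block. A minimal sketch of how that step usually proceeds, assuming StandardScaler is importable from learninspy.utils.data and follows a fit/transform pattern (both the import path and the transform calls are assumptions, not taken from the original file):

# Hedged sketch of the commented-out normalization step.
from learninspy.utils.data import StandardScaler

std = StandardScaler()
std.fit(train)                  # estimate scaling statistics on the training split only
train = std.transform(train)    # assumption: transform applies the fitted scaling
valid = std.transform(valid)
test = std.transform(test)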
Example #3
# -*- coding: utf-8 -*-
"""Ejemplos de uso para entrenar redes neuronales con Learninspy utilizando datos de Iris (clasificación)."""

__author__ = 'leferrad'

from learninspy.core.model import NeuralNetwork, NetworkParameters
from learninspy.core.optimization import OptimizerParameters
from learninspy.core.stops import criterion
from learninspy.utils.data import LocalLabeledDataSet, load_iris
from learninspy.utils.evaluation import ClassificationMetrics
from learninspy.utils.plots import plot_fitting
from learninspy.utils.fileio import get_logger

import os

logger = get_logger(name='learninspy-demo_iris')

# Things worth demonstrating here:
# * Examine how results differ with different consensus functions
# * Explore stop criteria
# ** Raising MaxIterations from 5 to 10 changes the total runtime a lot

# -- 1) Data loading

logger.info("Cargando datos de Iris ...")
dataset = load_iris()
dataset = LocalLabeledDataSet(dataset)
rows, cols = dataset.shape
logger.info("Dimension de datos: %i x %i", rows, cols)

train, valid, test = dataset.split_data([0.7, 0.1,
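
The Iris excerpt ends mid-call. To make the comments above about stop criteria concrete, a heavily hedged sketch of how MaxIterations might be wired into the optimizer configuration (criterion is imported above; the dictionary-style lookup and the stops keyword of OptimizerParameters are assumptions, not taken from the original file):

# Hedged sketch: assumes criterion maps stop-criterion names to factories and that
# OptimizerParameters accepts them through a 'stops' keyword; neither is confirmed here.
local_stops = [criterion['MaxIterations'](10)]   # 5 vs 10 iterations changes total runtime a lot
opt_params = OptimizerParameters(stops=local_stops)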
0
"""Ejemplos de uso para entrenar redes neuronales con Learninspy utilizando datos de Iris (clasificación)."""

__author__ = 'leferrad'

from learninspy.core.model import NeuralNetwork, NetworkParameters
from learninspy.core.optimization import OptimizerParameters
from learninspy.core.stops import criterion
from learninspy.utils.data import LocalLabeledDataSet, load_iris
from learninspy.utils.evaluation import ClassificationMetrics
from learninspy.utils.plots import plot_fitting
from learninspy.utils.fileio import get_logger

import os

logger = get_logger(name='learninspy-demo_iris')

# Aca conviene hacer de demo:
# *Examinar diferencias en resultados con diferentes funciones de consenso
# *Explorar criterios de corte
# ** MaxIterations de 5 a 10 cambia mucho la duracion final


# -- 1) Carga de datos

logger.info("Cargando datos de Iris ...")
dataset = load_iris()
dataset = LocalLabeledDataSet(dataset)
rows, cols = dataset.shape
logger.info("Dimension de datos: %i x %i", rows, cols)