from pyevolve import GAllele
import Oger
import pandas as pd

import sg.utils
import sg.utils.pyevolve_utils as pu
from model import Model
import load_cleansing
import load_prediction

def identity_transformer(data, genome, loci, prediction_steps):
    """This prediction model assumes tomorrow will be the same as today.

    Returns the prediction_steps observations preceding the prediction
    window, re-indexed onto the prediction window's timestamps.
    """
    history = data["Load"][-prediction_steps * 2:-prediction_steps]
    # Series.tshift was deprecated and removed from pandas; shift(freq=...)
    # performs the same index-only shift without touching the values.
    freq = history.index.freq or pd.infer_freq(history.index)
    return history.shift(prediction_steps, freq=freq)

def null_transformer(data, genome, loci, prediction_steps):
    """This prediction model assumes tomorrow will be entirely flat.

    Returns a series holding the mean of all historical load, broadcast
    over the prediction window's timestamps.
    """
    # pd.TimeSeries was removed from pandas; pd.Series is the drop-in
    # replacement (a scalar `data` argument broadcasts over the index).
    return pd.Series(data=data["Load"][:-prediction_steps].mean(),
                     index=data.index[-prediction_steps:])

class IdentityModelCreator(load_prediction.ModelCreator):
    """Creator for the trivial benchmark model: tomorrow equals today."""

    def _get_transform(self):
        """Return the prediction transform: copy yesterday onto tomorrow."""
        return identity_transformer

    def _add_transform_genes(self):
        """No transformer is evolved, hence there are no genes to add."""
        pass


# Script entry point: evolve and evaluate the identity model.
if __name__ == "__main__":
    load_prediction.run(IdentityModelCreator)
import arima
import load_cleansing
import load_prediction

class ARModelCreator(load_prediction.ModelCreator):
    """Creator for the AR model with evolved AR and exogenous lag orders."""

    def _add_transform_genes(self):
        """Add the two integer genes: AR order and exogenous order."""
        self._alleles.add(pu.make_int_gene(1, 1, 8 * 24, 5))  # AR order, 1..192
        self._alleles.add(pu.make_int_gene(1, 0, 8 * 24, 5))  # Exogenous order, 0..192
        self._loci_list += ['AR_order', 'EXO_order']

    def _get_transform(self):
        """Return the AR-based GA transform."""
        return arima.ar_ga


class ARBitmapModelCreator(load_prediction.ModelCreator):
    """Creator for an AR model whose individual lags are toggled by bitmaps."""

    def _add_transform_genes(self):
        """Add one lag-selection bitmap each for the AR and exogenous parts."""
        n_lags = 24 * 8  # up to eight days of hourly lags
        self._alleles.add(pu.make_bitmap_gene(n_lags))
        self._alleles.add(pu.make_bitmap_gene(n_lags))
        self._loci_list += ['AR_lags']
        self._loci_list += ['EXO_lags']

    def _get_transform(self):
        """Return the bitmap-controlled AR transform."""
        return arima.bitmapped_ar_ga


# Script entry point: evolve and evaluate the AR model.
if __name__ == '__main__':
    load_prediction.run(ARModelCreator)
    #load_prediction.run(ARBitmapModelCreator())
import arima
import load_cleansing
import load_prediction

class DSHWModelCreator(load_prediction.ModelCreator):
    """Creator for the double seasonal Holt-Winters (DSHW) model."""

    def _add_transform_genes(self):
        """Add one real-valued gene in [0, 1] per DSHW smoothing parameter."""
        smoothing_params = ['alpha', 'beta', 'gamma', 'omega', 'phi']
        for _ in smoothing_params:
            self._alleles.add(pu.make_real_gene(1, 0, 1, .1), weight=1)
        self._loci_list += smoothing_params

    def _get_transform(self):
        """Return the DSHW transform."""
        return arima.dshw


class AutoDSHWModelCreator(load_prediction.ModelCreator):
    """DSHW variant whose smoothing parameters are estimated, not evolved."""

    def _get_transform(self):
        """Return the self-tuning DSHW transform."""
        return arima.auto_dshw

    def _add_transform_genes(self):
        """Nothing to evolve: parameter estimation happens in the transform."""
        pass


# Script entry point: evolve and evaluate the DSHW model.
if __name__ == "__main__":
    load_prediction.run(DSHWModelCreator)
    #load_prediction.run(AutoDSHWModelCreator())
# Ejemplo n.º 4

class WaveletCBRModelCreator(load_prediction.ModelCreator):
    """Case-based reasoning on wavelet features, backed by an R-tree dataset."""

    def get_dataset(self, options):
        """This function should create the an instance of a dataset class
        according to the selected model and user options."""
        dim = 31
        if options.bc_data:
            return RTreeBCHydroDataset(options, dim)
        if options.total_load:
            return RTreeTotalDataset(options, dim)
        return RTreeDataset(options, dim)

    def _add_transform_genes(self):
        """Add the CBR dimension gene; runs with cleaning are unsupported."""
        self._alleles.add(pu.make_int_gene(1, 2, 11, 1))  # Dimension
        # (Weight and boolean-mask genes from an earlier revision were
        # dropped here.)
        self._loci_list += ['dimension']
        if not self._options.no_cleaning:
            raise NotImplementedError(
                "CBR with cleaning not updated to work with pipeline model for cleaning."
            )

    def _get_transform(self):
        """Return the CBR retrieval transform."""
        return wavelet.retrieve


# Script entry point: evolve and evaluate the wavelet CBR model.
if __name__ == "__main__":
    load_prediction.run(WaveletCBRModelCreator)
# Ejemplo n.º 5
"""Evolve a load predictor with BSpline data cleansing and AR/ARIMA predictor."""

from pyevolve import GAllele
import Oger

import sg.utils
import sg.utils.pyevolve_utils as pu
from model import Model
import arima
import load_cleansing
import load_prediction
import load_prediction_ar


class ARHourByHourModelCreator(load_prediction_ar.ARModelCreator):
    """AR creator variant fitting one model per hour of the day."""

    def _get_transform(self):
        """Return the hour-by-hour AR transform."""
        return arima.hourbyhour_ar_ga


class ARHourByHourBitmapModelCreator(load_prediction_ar.ARBitmapModelCreator):
    """Bitmap-lag AR creator variant fitting one model per hour of the day."""

    def _get_transform(self):
        """Return the hour-by-hour bitmap AR transform."""
        return arima.bitmapped_hourbyhour_ar_ga


# Script entry point: evolve and evaluate the hour-by-hour AR model.
if __name__ == "__main__":
    load_prediction.run(ARHourByHourModelCreator)
    #load_prediction.run(ARHourByHourBitmapModelCreator())
from model import Model
import wavelet
import load_cleansing
import load_prediction


class WaveletHourByHourModelCreator(load_prediction.ModelCreator):
    def _add_transform_genes(self):
        """This is where the models are defined. The models are passed to the
        GA engine for evolution of the optimal set of parameters. Afterwards,
        the models are tested, and performance is measured."""

        self._alleles.add(pu.make_int_gene(1, 1, 10, 1))  # Scale
        self._alleles.add(pu.make_choice_gene(
            1, [2]))  # Aj, in the paper 2 gives best results.
        gene = pu.make_choice_gene(1, [i for i in self._hindsight_days])
        self._alleles.add(gene, weight=1)

        # BUG FIX: 'options' was an undefined name here; parsed options are
        # available on the instance as self._options (cf. the other model
        # creators in this module).
        if self._options.no_cleaning:
            loci = sg.utils.Enum('scale', 'Aj')
        else:
            loci = sg.utils.Enum('scale', 'Aj', 't_smooth', 'l_smooth',
                                 't_zscore', 'l_zscore')
        # NOTE(review): 'loci' is assigned but never used or stored --
        # presumably this predates the self._loci_list mechanism; confirm
        # whether these names should be appended to self._loci_list instead.

    def _get_transform(self):
        """Return the hour-by-hour multiscale wavelet transform."""
        return wavelet.hourbyhour_multiscale_prediction_ga


# Script entry point: evolve and evaluate the hour-by-hour wavelet model.
if __name__ == "__main__":
    load_prediction.run(WaveletHourByHourModelCreator)
import load_prediction
import taohong

class VanillaModelCreator(load_prediction.ModelCreator):
    """Creator for the vanilla benchmark model; no parameters are evolved."""

    def _get_transform(self):
        """Return the vanilla benchmark transform."""
        return taohong.vanilla

    def _add_transform_genes(self):
        """The vanilla model has no tunable transform genes."""
        pass

    
# Script entry point: evolve and evaluate the vanilla benchmark model.
if __name__ == '__main__':
    load_prediction.run(VanillaModelCreator)
import sg.utils
import sg.utils.pyevolve_utils as pu
from model import Model
import load_prediction


def hourly_average(data, genome, loci, prediction_steps):
    """Predict each of the next 24 hours as the mean load observed for
    that hour of day over the evolved hindsight window."""
    # The hour-of-day grouping below hard-codes a daily (24-step) cycle.
    assert (prediction_steps == 24)
    start = -prediction_steps - genome[loci.hindsight]
    end = -prediction_steps
    avg_data = pd.DataFrame({"Load": data["Load"][start:end].copy()})
    avg_data["Hour of day"] = [i.hour for i in avg_data.index]
    means = avg_data.groupby(["Hour of day"]).mean()["Load"]
    # pd.TimeSeries was removed from pandas; pd.Series is the drop-in
    # replacement.
    return pd.Series(data=means.values,
                     index=data.index[-prediction_steps:])


class HourlyAverageModelCreator(load_prediction.ModelCreator):
    """Creator for the hour-of-day averaging benchmark model."""

    def _add_transform_genes(self):
        """Add a single dummy gene so Pyevolve's 1D crossover has work to do."""
        dummy = pu.make_real_gene(1, 0, 1, 0.1)
        self._alleles.add(dummy)
        self._loci_list += ['crossover_dummy']

    def _get_transform(self):
        """Return the hour-of-day averaging transform."""
        return hourly_average


# Script entry point: evolve and evaluate the hourly-average model.
if __name__ == "__main__":
    load_prediction.run(HourlyAverageModelCreator)
# Ejemplo n.º 9
import sg.utils
import sg.utils.pyevolve_utils as pu
from model import Model
import load_cleansing
import load_prediction


def identity_transformer(data, genome, loci, prediction_steps):
    """This prediction model assumes tomorrow will be the same as today.

    Returns the prediction_steps observations preceding the prediction
    window, re-indexed onto the prediction window's timestamps.
    """
    history = data["Load"][-prediction_steps * 2:-prediction_steps]
    # Series.tshift was removed from pandas; shift(freq=...) performs the
    # same index-only shift without touching the values.
    freq = history.index.freq or pd.infer_freq(history.index)
    return history.shift(prediction_steps, freq=freq)


def null_transformer(data, genome, loci, prediction_steps):
    """This prediction model assumes tomorrow will be entirely flat.

    Returns a series holding the mean of all historical load, broadcast
    over the prediction window's timestamps.
    """
    # pd.TimeSeries was removed from pandas; pd.Series is the drop-in
    # replacement (a scalar `data` argument broadcasts over the index).
    return pd.Series(data=data["Load"][:-prediction_steps].mean(),
                     index=data.index[-prediction_steps:])


class IdentityModelCreator(load_prediction.ModelCreator):
    """Creator for the no-transform benchmark: tomorrow equals today."""

    def _get_transform(self):
        """Return the identity prediction transform."""
        return identity_transformer

    def _add_transform_genes(self):
        """No transformer is evolved, so no genes are added."""
        pass


# Script entry point: evolve and evaluate the identity model.
if __name__ == "__main__":
    load_prediction.run(IdentityModelCreator)
# Ejemplo n.º 10
import load_cleansing
import load_prediction

class ESNModelCreator(load_prediction.ModelCreator):
    def _add_transform_genes(self):
        """Sets up for evolution of the ESN model.

        Genes: reservoir size, leak rate, input scaling, bias scaling,
        spectral radius, RNG seed choice and a fixed scaled ridge term.
        """
        self._alleles.add(pu.make_int_gene(1, 10, 500, 25), weight=1) # Network size
        self._alleles.add(pu.make_real_gene(1, 0, 1, 0.05), weight=1) # Leak rate
        self._alleles.add(pu.make_real_gene(1, 0.1, 0.75, 0.05), weight=1) # Input scaling
        self._alleles.add(pu.make_real_gene(1, 0, 1, 0.05), weight=1) # Bias scaling
        self._alleles.add(pu.make_real_gene(1, 0.5, 2, 0.05), weight=1) # Spectral radius
        # We don't want too many seeds per evolutions, but we don't want to
        # always evolve on the same 5 networks either.
        # BUG FIX: np.random.random_integers was deprecated and removed from
        # NumPy; randint with an exclusive upper bound of 2**16 + 1 draws
        # from the same inclusive [0, 2**16] range.
        self._alleles.add(pu.make_choice_gene(
            1, np.random.randint(0, 2**16 + 1, 5)), weight=1) # Seed
        # Grid optimization showed that for a training length of 336, with
        # other params set based on previous gridopts and operating on the
        # total dataset rather than single AMS'es, optimal ridge was ~5. Scaled
        # thus 5/336=0.015.
        self._alleles.add(pu.make_choice_gene(
            1, [0.0001/self._max_hindsight_hours]), weight=1) # Scaled ridge
        self._loci_list += ['size', 'leak', 'in_scale',
                            'bias_scale', 'spectral', 'seed', 'ridge']

    def _get_transform(self):
        """Return the ESN transform with feedback and external input."""
        return esn.feedback_with_external_input


# Script entry point: evolve and evaluate the ESN model.
if __name__ == "__main__":
    load_prediction.run(ESNModelCreator)
        self._alleles.add(pu.make_int_gene(1, 0, 1e6, 100))

    def _lambda_mapper(self, lc_gene_val):
        """Map an integer gene value to a regularization strength on a log
        scale: 0 maps to 0, growing as (10**gene - 1) / 1000."""
        return (np.power(10, lc_gene_val) - 1) / 1e3


class RegularizedVanillaModelCreator(load_prediction.ModelCreator):
    def __init__(self, *args, **kwargs):
        load_prediction.ModelCreator.__init__(self, *args, **kwargs)
        self._warning_printed = False

    def _add_transform_genes(self):
        '''Sets up for evolution of the regularized vanilla benchmark model.'''
        self._alleles.add(pu.make_int_gene(1, 0, 1e6, 100))
        self._loci_list += ['lambda_cont']

    def _transform(self, data, genome, loci, prediction_steps):
        if not self._warning_printed:
            print 'Hindsight genome ignored, using all available data in Vanilla model.'
            self._warning_printed = True
        svp = regul_ar.VanillaVectorPredictor(data[:-prediction_steps])
        svp.estimate(lambda_cont=genome[loci.lambda_cont])
        return svp.predict(data[-prediction_steps:])

    def _get_transform(self):
        return functools.partial(type(self)._transform, self)


# Script entry point. NOTE(review): LogRegularizedVectorARModelCreator is not
# defined in this listing -- presumably defined in the truncated part of the
# original module; confirm before running.
if __name__ == '__main__':
    load_prediction.run(LogRegularizedVectorARModelCreator)
# Ejemplo n.º 12
import load_cleansing
import load_prediction


class DSHWModelCreator(load_prediction.ModelCreator):
    """Creator for the double seasonal Holt-Winters (DSHW) model."""

    def _add_transform_genes(self):
        """Add one real-valued gene in [0, 1] for each smoothing parameter."""
        param_names = ['alpha', 'beta', 'gamma', 'omega', 'phi']
        for _ in param_names:
            self._alleles.add(pu.make_real_gene(1, 0, 1, .1), weight=1)
        self._loci_list += param_names

    def _get_transform(self):
        """Return the DSHW transform."""
        return arima.dshw


class AutoDSHWModelCreator(load_prediction.ModelCreator):
    """DSHW variant with automatic parameter estimation instead of evolution."""

    def _get_transform(self):
        """Return the self-tuning DSHW transform."""
        return arima.auto_dshw

    def _add_transform_genes(self):
        """Nothing to evolve for the auto-tuned DSHW model."""
        pass


# Script entry point: evolve and evaluate the DSHW model.
if __name__ == "__main__":
    load_prediction.run(DSHWModelCreator)
    #load_prediction.run(AutoDSHWModelCreator())
"""Evolve a load predictor with BSpline data cleansing and predictor as daily or 24-hour averages."""

import numpy as np
import pandas as pd

import load_prediction
import load_prediction_averagehourly as lpah

def daily_average(data, genome, loci, prediction_steps):
    """Predict every step as the mean load over the hindsight window."""
    start = -prediction_steps - genome[loci.hindsight]
    end = -prediction_steps
    # pd.TimeSeries was removed from pandas; pd.Series broadcasts the scalar
    # mean over the prediction window's index.
    return pd.Series(data=data["Load"][start:end].mean(),
                     index=data.index[-prediction_steps:])


class DailyAverageModelCreator(lpah.HourlyAverageModelCreator):
    """Same GA setup as the hourly-average creator, but predicts a flat daily mean."""

    def _get_transform(self):
        """Return the daily averaging transform."""
        return daily_average


# Script entry point: evolve and evaluate the daily-average model.
if __name__ == "__main__":
    load_prediction.run(DailyAverageModelCreator)
import numpy as np
import pandas as pd

import sg.utils
import sg.utils.pyevolve_utils as pu
from model import Model
import load_prediction

def hourly_average(data, genome, loci, prediction_steps):
    """Predict each of the next 24 hours as the mean load observed for that
    hour of day over the evolved hindsight window."""
    # The hour-of-day grouping below hard-codes a daily (24-step) cycle.
    assert(prediction_steps == 24)
    start = -prediction_steps - genome[loci.hindsight]
    end = -prediction_steps
    avg_data = pd.DataFrame({"Load": data["Load"][start:end].copy()})
    avg_data["Hour of day"] = [i.hour for i in avg_data.index]
    means = avg_data.groupby(["Hour of day"]).mean()["Load"]
    # pd.TimeSeries was removed from pandas; pd.Series is the drop-in
    # replacement.
    return pd.Series(data=means.values,
                     index=data.index[-prediction_steps:])

class HourlyAverageModelCreator(load_prediction.ModelCreator):
    """Creator for the hour-of-day averaging benchmark model."""

    def _add_transform_genes(self):
        """Add one dummy gene so Pyevolve's 1D crossover has something to cut."""
        filler_gene = pu.make_real_gene(1, 0, 1, 0.1)
        self._alleles.add(filler_gene)
        self._loci_list += ['crossover_dummy']

    def _get_transform(self):
        """Return the hour-of-day averaging transform."""
        return hourly_average


# Script entry point: evolve and evaluate the hourly-average model.
if __name__ == "__main__":
    load_prediction.run(HourlyAverageModelCreator)
        self._alleles.add(pu.make_int_gene(1, 0, 1e6, 100))

    def _lambda_mapper(self, lc_gene_val):
        """Map an integer gene value to a regularization strength on a log
        scale: 0 maps to 0, growing as (10**gene - 1) / 1000."""
        return (np.power(10, lc_gene_val) - 1) / 1e3

    
class RegularizedVanillaModelCreator(load_prediction.ModelCreator):
    def __init__(self, *args, **kwargs):
        load_prediction.ModelCreator.__init__(self, *args, **kwargs)
        self._warning_printed = False
        
    def _add_transform_genes(self):
        '''Sets up for evolution of the regularized vanilla benchmark model.'''    
        self._alleles.add(pu.make_int_gene(1, 0, 1e6, 100))
        self._loci_list += ['lambda_cont']

    def _transform(self, data, genome, loci, prediction_steps):
        if not self._warning_printed:
            print 'Hindsight genome ignored, using all available data in Vanilla model.'
            self._warning_printed = True
        svp = regul_ar.VanillaVectorPredictor(data[:-prediction_steps])
        svp.estimate(lambda_cont=genome[loci.lambda_cont])
        return svp.predict(data[-prediction_steps:])
    
    def _get_transform(self):
        return functools.partial(type(self)._transform, self)


# Script entry point. NOTE(review): LogRegularizedVectorARModelCreator is not
# defined in this listing -- presumably defined in the truncated part of the
# original module; confirm before running.
if __name__ == '__main__':
    load_prediction.run(LogRegularizedVectorARModelCreator)
# Ejemplo n.º 16
"""Evolve a load predictor with BSpline data cleansing and predictor as daily or 24-hour averages."""

import numpy as np
import pandas as pd

import load_prediction
import load_prediction_averagehourly as lpah


def daily_average(data, genome, loci, prediction_steps):
    """Predict every step as the mean load over the hindsight window."""
    start = -prediction_steps - genome[loci.hindsight]
    end = -prediction_steps
    # pd.TimeSeries was removed from pandas; pd.Series broadcasts the scalar
    # mean over the prediction window's index.
    return pd.Series(data=data["Load"][start:end].mean(),
                     index=data.index[-prediction_steps:])


class DailyAverageModelCreator(lpah.HourlyAverageModelCreator):
    """Reuses the hourly-average GA setup but predicts one flat daily mean."""

    def _get_transform(self):
        """Return the daily averaging transform."""
        return daily_average


# Script entry point: evolve and evaluate the daily-average model.
if __name__ == "__main__":
    load_prediction.run(DailyAverageModelCreator)
from pyevolve import GAllele
import Oger

import sg.utils
import sg.utils.pyevolve_utils as pu
from model import Model
import wavelet
import load_cleansing
import load_prediction

class WaveletModelCreator(load_prediction.ModelCreator):
    """Multiscale wavelet predictor; evolves the scale and Aj parameters."""

    def _add_transform_genes(self):
        """Define the wavelet genes. These are handed to the GA engine, which
        evolves the optimal parameter set; the resulting models are then
        tested and their performance measured."""
        scale_gene = pu.make_int_gene(1, 1, 10, 1)
        self._alleles.add(scale_gene)  # Scale
        # Aj: the referenced paper reports 2 as the best-performing value.
        self._alleles.add(pu.make_choice_gene(1, [2]))
        self._loci_list += ['scale', 'Aj']

    def _get_transform(self):
        """Return the multiscale prediction transform. (Other variants exist
        in the wavelet module: linear_prediction, linear_vector,
        vector_multiscale_prediction, iterative_multiscale_prediction.)"""
        return wavelet.multiscale_prediction


# Script entry point: evolve and evaluate the wavelet model.
if __name__ == "__main__":
    load_prediction.run(WaveletModelCreator)
        self._alleles.add(pu.make_choice_gene(1,
                                              [0, 1, 2]))  # 'I' backshift (d)
        self._alleles.add(pu.make_choice_gene(1,
                                              [1, 2, 3]))  # 'MA' backshift (q)
        self._alleles.add(pu.make_int_gene(1, 1, 10,
                                           1))  # Seasonal 'AR' backshift (p)
        self._alleles.add(pu.make_choice_gene(
            1, [0, 1, 2]))  # Seasonal 'I' backshift (d)
        self._alleles.add(pu.make_choice_gene(
            1, [1, 2, 3]))  # Seasonal 'MA' backshift (q)
        self._loci_list += [
            'AR_order', 'I_order', 'MA_order', 'ssn_AR_order', 'ssn_I_order',
            'ssn_MA_order'
        ]

    def _get_transform(self):
        """Return the seasonal-ARIMA-with-weather transform for this creator."""
        return arima.seasonal_arima_with_weather


class AutoARIMAModelCreator(load_prediction.ModelCreator):
    """ARIMA variant where order selection is automatic rather than evolved."""

    def _get_transform(self):
        """Return the auto-ARIMA-with-weather transform."""
        return arima.auto_arima_with_weather

    def _add_transform_genes(self):
        """Nothing to evolve: model orders are chosen by auto-ARIMA."""
        pass


# Script entry point. NOTE(review): ARIMAModelCreator's definition is
# truncated in this listing -- confirm it is complete in the real module.
if __name__ == "__main__":
    load_prediction.run(ARIMAModelCreator)
"""Evolve a load predictor with BSpline data cleansing and AR/ARIMA predictor."""

from pyevolve import GAllele
import Oger

import sg.utils
import sg.utils.pyevolve_utils as pu
from model import Model
import arima
import load_cleansing
import load_prediction
import load_prediction_ar

class ARHourByHourModelCreator(load_prediction_ar.ARModelCreator):
    """AR creator variant that fits one model per hour of the day."""

    def _get_transform(self):
        """Return the hour-by-hour AR transform."""
        return arima.hourbyhour_ar_ga


class ARHourByHourBitmapModelCreator(load_prediction_ar.ARBitmapModelCreator):
    """Bitmap-lag AR creator variant that fits one model per hour of the day."""

    def _get_transform(self):
        """Return the hour-by-hour bitmap AR transform."""
        return arima.bitmapped_hourbyhour_ar_ga


# Script entry point: evolve and evaluate the hour-by-hour AR model.
if __name__ == "__main__":
    load_prediction.run(ARHourByHourModelCreator)
    #load_prediction.run(ARHourByHourBitmapModelCreator())
import sg.utils.pyevolve_utils as pu
from model import Model
import wavelet
import load_cleansing
import load_prediction

class WaveletHourByHourModelCreator(load_prediction.ModelCreator):
    def _add_transform_genes(self):
        """This is where the models are defined. The models are passed to the
        GA engine for evolution of the optimal set of parameters. Afterwards,
        the models are tested, and performance is measured."""

        self._alleles.add(pu.make_int_gene(1, 1, 10, 1)) # Scale
        self._alleles.add(pu.make_choice_gene(1, [2])) # Aj, in the paper 2 gives best results.
        gene = pu.make_choice_gene(1, [i for i in self._hindsight_days])
        self._alleles.add(gene, weight=1)

        # BUG FIX: 'options' was an undefined name here; parsed options are
        # available on the instance as self._options (cf. the other model
        # creators in this module).
        if self._options.no_cleaning:
            loci = sg.utils.Enum('scale', 'Aj')
        else:
            loci = sg.utils.Enum('scale', 'Aj', 't_smooth',
                                 'l_smooth', 't_zscore', 'l_zscore')
        # NOTE(review): 'loci' is assigned but never used or stored --
        # presumably this predates the self._loci_list mechanism; confirm
        # whether these names should be appended to self._loci_list instead.

    def _get_transform(self):
        """Return the hour-by-hour multiscale wavelet transform."""
        return wavelet.hourbyhour_multiscale_prediction_ga


# Script entry point: evolve and evaluate the hour-by-hour wavelet model.
if __name__ == "__main__":
    load_prediction.run(WaveletHourByHourModelCreator)