def train_sckit_dirregression(architecture, config, runconfig):
    """
    Train a scikit-style architecture with the direct-regression strategy.

    One independent model is built, trained and evaluated for every step of
    the prediction horizon; per-step results are accumulated and logged.

    :param architecture: architecture class to instantiate (scikit wrapper)
    :param config: experiment configuration dictionary
    :param runconfig: run-time configuration (verbose, remote, ...)
    :return: list of per-horizon results, each ``[ahead] + metrics``
    """
    # 'ahead' may be a [first, last] pair or a single upper bound
    if isinstance(config['data']['ahead'], list):
        iahead, sahead = config['data']['ahead']
    else:
        iahead, sahead = 1, config['data']['ahead']

    lresults = []
    for ahead in range(iahead, sahead + 1):
        if runconfig.verbose:
            print('************************************************************')
            print(f'Steps Ahead = {ahead} ')

        # Dataset for this specific horizon step
        dataset = Dataset(config=config['data'], data_path=wind_data_path)
        dataset.generate_dataset(ahead=ahead, mode=architecture.data_mode,
                                 remote=runconfig.remote)
        train_x, train_y, val_x, val_y, test_x, test_y = dataset.get_data_matrices()

        ############################################
        # Model
        arch = architecture(config, runconfig)
        arch.generate_model()

        if runconfig.verbose:
            arch.summary()
            dataset.summary()
            print()

        ############################################
        # Training
        arch.train(train_x, train_y, val_x, val_y)

        ############################################
        # Results — descale by default unless explicitly disabled in config
        if 'descale' not in config['training'] or config['training']['descale']:
            lresults.append([ahead] + arch.evaluate(val_x, val_y, test_x, test_y,
                                                    scaler=dataset.scaler))
        else:
            lresults.append([ahead] + arch.evaluate(val_x, val_y, test_x, test_y))

        print(strftime('%Y-%m-%d %H:%M:%S'))

        # Update result in db
        if config is not None:
            updateprocess(config, ahead)

    arch.log_result(lresults)

    return lresults
def train_sckit_sequence2sequence(architecture, config, runconfig):
    """
    Train a scikit-style architecture that predicts the whole horizon at once
    (sequence-to-sequence strategy), optionally saving prediction errors.

    NOTE(review): a second function with this exact name appears later in the
    file and shadows this definition at import time — confirm which version is
    the intended one.

    :param architecture: architecture class to instantiate (scikit wrapper)
    :param config: experiment configuration dictionary
    :param runconfig: run-time configuration (verbose, remote, ...)
    :return: evaluation results for the full horizon
    """
    # Normalize 'ahead' to a [first, last] pair
    ahead = config['data']['ahead'] if isinstance(config['data']['ahead'], list) \
        else [1, config['data']['ahead']]

    # Dataset for the full horizon
    dataset = Dataset(config=config['data'], data_path=wind_data_path)
    dataset.generate_dataset(ahead=ahead, mode=architecture.data_mode,
                             remote=runconfig.remote)
    train_x, train_y, val_x, val_y, test_x, test_y = dataset.get_data_matrices()

    ############################################
    # Model
    arch = architecture(config, runconfig)
    arch.generate_model()

    if runconfig.verbose:
        arch.summary()
        dataset.summary()
        print()

    ############################################
    # Training
    arch.train(train_x, train_y, val_x, val_y)

    ############################################
    # Optional error-saving suffix for evaluate().
    # Guarded lookup: the original code printed config['training']['saverrors']
    # unconditionally, which raised KeyError when the key was absent.
    saverrors = None
    if config['training'].get('saverrors'):
        saverrors = f'-{ahead[0]}-{ahead[1]}'

    # Results — descale by default unless explicitly disabled in config
    if 'descale' not in config['training'] or config['training']['descale']:
        lresults = arch.evaluate(val_x, val_y, test_x, test_y,
                                 scaler=dataset.scaler, save_errors=saverrors)
    else:
        lresults = arch.evaluate(val_x, val_y, test_x, test_y,
                                 save_errors=saverrors)

    print(strftime('%Y-%m-%d %H:%M:%S'))

    # Update result in db
    if config is not None:
        updateprocess(config, ahead)

    arch.log_result(lresults)

    return lresults
def train_persistence(architecture, config, runconfig):
    """
    Evaluate a persistence (baseline) architecture for each horizon step.

    No actual training happens: for every step a dataset is generated and the
    persistence architecture is evaluated directly on it.

    :param architecture: persistence architecture class to instantiate
    :param config: experiment configuration dictionary
    :param runconfig: run-time configuration (verbose, remote, ...)
    :return: list of per-horizon results, each ``[ahead] + metrics``
    """
    # 'ahead' may be a [first, last] pair or a single upper bound
    if isinstance(config['data']['ahead'], list):
        iahead, sahead = config['data']['ahead']
    else:
        iahead, sahead = 1, config['data']['ahead']

    lresults = []
    for ahead in range(iahead, sahead + 1):
        if runconfig.verbose:
            print("-----------------------------------------------------------------------------")
            print(f"Steps Ahead = {ahead}")

        # Dataset for this single horizon step
        dataset = Dataset(config=config['data'], data_path=wind_data_path)
        dataset.generate_dataset(ahead=[ahead, ahead], mode=architecture.data_mode,
                                 remote=runconfig.remote)
        train_x, train_y, val_x, val_y, test_x, test_y = dataset.get_data_matrices()

        # Architecture (no training needed for persistence)
        arch = architecture(config, runconfig)

        if runconfig.verbose:
            dataset.summary()

        # Results — descale by default unless explicitly disabled in config
        if 'descale' not in config['training'] or config['training']['descale']:
            lresults.append([ahead] + arch.evaluate(val_x, val_y, test_x, test_y,
                                                    scaler=dataset.scaler))
        else:
            lresults.append([ahead] + arch.evaluate(val_x, val_y, test_x, test_y))

        print(strftime('%Y-%m-%d %H:%M:%S'))

        # Update result in db
        if config is not None:
            updateprocess(config, ahead)

        # Free the per-step dataset before the next iteration
        del dataset

    arch.log_result(lresults)

    return lresults
def train_sckit_sequence2sequence(architecture, config, runconfig):
    """
    Train a scikit-style architecture that predicts the whole horizon at once
    (sequence-to-sequence strategy).

    NOTE(review): this is the second definition of this name in the file and
    shadows the earlier one (which additionally supports 'saverrors') —
    confirm which version is the intended one.

    :param architecture: architecture class to instantiate (scikit wrapper)
    :param config: experiment configuration dictionary
    :param runconfig: run-time configuration (verbose, remote, ...)
    :return: evaluation results for the full horizon
    """
    # Normalize 'ahead' to a [first, last] pair
    ahead = config['data']['ahead'] if isinstance(config['data']['ahead'], list) \
        else [1, config['data']['ahead']]

    # Dataset for the full horizon
    dataset = Dataset(config=config['data'], data_path=wind_data_path)
    dataset.generate_dataset(ahead=ahead, mode=architecture.data_mode,
                             remote=runconfig.remote)
    train_x, train_y, val_x, val_y, test_x, test_y = dataset.get_data_matrices()

    ############################################
    # Model
    arch = architecture(config, runconfig)
    arch.generate_model()

    if runconfig.verbose:
        arch.summary()
        dataset.summary()
        print()

    ############################################
    # Training
    arch.train(train_x, train_y, val_x, val_y)

    ############################################
    # Results — descale by default unless explicitly disabled, matching the
    # behavior of every other trainer in this module (the original version of
    # this function always evaluated on scaled data).
    if 'descale' not in config['training'] or config['training']['descale']:
        lresults = arch.evaluate(val_x, val_y, test_x, test_y,
                                 scaler=dataset.scaler)
    else:
        lresults = arch.evaluate(val_x, val_y, test_x, test_y)

    print(strftime('%Y-%m-%d %H:%M:%S'))

    # Update result in db
    if config is not None:
        updateprocess(config, ahead)

    arch.log_result(lresults)

    return lresults
def train_dirregression(architecture, config, runconfig):
    """
    Train a (neural) architecture with the direct-regression strategy.

    For each of ``niter`` repetitions, one independent model is built, trained
    and evaluated per horizon step; each trained model is saved with a suffix
    identifying the step and the repetition.

    :param architecture: architecture class to instantiate
    :param config: experiment configuration dictionary
    :param runconfig: run-time configuration (verbose, multi, proxy, remote, ...)
    :return: list of per-horizon results, each ``[ahead] + metrics``
    """
    # 'ahead' may be a [first, last] pair or a single upper bound
    if isinstance(config['data']['ahead'], list):
        iahead, sahead = config['data']['ahead']
    else:
        iahead, sahead = 1, config['data']['ahead']

    lresults = []
    niter = config['training'].get('iter', 1)

    # 'rep' renamed from 'iter', which shadowed the builtin
    for rep in range(niter):
        for ahead in range(iahead, sahead + 1):
            if runconfig.verbose:
                print('-----------------------------------------------------------------------------')
                print(f"Steps Ahead = {ahead}")

            # Dataset for this single horizon step
            dataset = Dataset(config=config['data'], data_path=wind_data_path)
            dataset.generate_dataset(ahead=[ahead, ahead], mode=architecture.data_mode,
                                     remote=runconfig.remote)
            train_x, train_y, val_x, val_y, test_x, test_y = dataset.get_data_matrices()

            ############################################
            # Model — input dimensions come from the generated data matrices
            config['idimensions'] = train_x.shape[1:]
            arch = architecture(config, runconfig)

            # Multi-GPU runs build the model on the CPU first
            if runconfig.multi == 1:
                arch.generate_model()
            else:
                with tf.device('/cpu:0'):
                    arch.generate_model()

            if runconfig.verbose:
                arch.summary()
                arch.plot()
                dataset.summary()
                print()

            ############################################
            # Training
            arch.train(train_x, train_y, val_x, val_y)

            ############################################
            # Results
            lresults.append([ahead] + arch.evaluate(val_x, val_y, test_x, test_y))

            print(strftime('%Y-%m-%d %H:%M:%S'))

            # Update result in db (skipped when running through a proxy)
            if config is not None and not runconfig.proxy:
                from Wind.DataBaseConfigurations import updateprocess
                updateprocess(config, ahead)

            arch.save('-A%d-R%02d' % (ahead, rep))
            del dataset

    arch.log_result(lresults)

    return lresults
def train_sjoint_sequence2sequence(architecture, config, runconfig):
    """
    Train an architecture with multiple blocks of horizons (multihorizon
    SJOINT strategy).

    The horizon is partitioned into consecutive slices of size
    ``config['data']['slice']`` and one model is trained per slice (slice
    size 1 degenerates to direct regression). The dataset is generated once
    for the whole horizon and the target matrices are sliced per block.

    :param architecture: architecture class to instantiate
    :param config: experiment configuration dictionary
    :param runconfig: run-time configuration (verbose, multi, proxy, remote, ...)
    :return: list of evaluation results accumulated over slices and repetitions
    """
    # 'ahead' may be a [first, last] pair or a single upper bound
    if isinstance(config['data']['ahead'], list):
        iahead, sahead = config['data']['ahead']
    else:
        iahead, sahead = 1, config['data']['ahead']

    # Number of consecutive horizon elements to join in one prediction
    # ('hsize' renamed from 'slice', which shadowed the builtin)
    hsize = config['data']['slice']

    lresults = []
    niter = config['training'].get('iter', 1)
    lmodels = []

    # [start, end] pairs partitioning the horizon; the last slice is clamped
    # to the horizon end when hsize does not divide the horizon length.
    steps = [[i, j] for i, j in zip(range(iahead, sahead + 1, hsize),
                                    range(hsize, sahead + hsize + 1, hsize))]
    steps[-1][1] = sahead

    # 'rep' renamed from 'iter', which shadowed the builtin
    for rep in range(niter):
        # Load the dataset once; slice the y matrices per horizon block
        dataset = Dataset(config=config['data'], data_path=wind_data_path)
        dataset.generate_dataset(ahead=[iahead, sahead], mode=architecture.data_mode,
                                 remote=runconfig.remote)
        train_x, train_y, val_x, val_y, test_x, test_y = dataset.get_data_matrices()

        for recit, ahead in enumerate(steps):
            if runconfig.verbose:
                print('-----------------------------------------------------------------------------')
                print(f"Steps Ahead = {ahead}")

            ############################################
            # Model — a deep copy of the config carries the per-slice horizon
            config['idimensions'] = train_x.shape[1:]
            config['odimensions'] = ahead[1] - ahead[0] + 1
            nconfig = deepcopy(config)
            nconfig['data']['ahead'] = ahead
            arch = architecture(nconfig, runconfig)

            # Multi-GPU runs build the model on the CPU first
            if runconfig.multi == 1:
                arch.generate_model()
            else:
                with tf.device('/cpu:0'):
                    arch.generate_model()

            if runconfig.verbose:
                arch.summary()
                arch.plot()
                dataset.summary()
                print()

            ############################################
            # Training with the current slice of the target matrix
            arch.train(train_x, train_y[:, ahead[0] - 1:ahead[1]],
                       val_x, val_y[:, ahead[0] - 1:ahead[1]])

            ############################################
            # Results — descale by default unless explicitly disabled
            if 'descale' not in config['training'] or config['training']['descale']:
                lresults.extend(arch.evaluate(val_x, val_y[:, ahead[0] - 1:ahead[1]],
                                              test_x, test_y[:, ahead[0] - 1:ahead[1]],
                                              scaler=dataset.scaler))
            else:
                lresults.extend(arch.evaluate(val_x, val_y[:, ahead[0] - 1:ahead[1]],
                                              test_x, test_y[:, ahead[0] - 1:ahead[1]]))

            print(strftime('%Y-%m-%d %H:%M:%S'))

            # Update result in db (skipped when running through a proxy)
            if config is not None and not runconfig.proxy:
                from Wind.DataBaseConfigurations import updateprocess
                updateprocess(config, ahead)

            arch.save(f"-{ahead[0]}-{ahead[1]}-S{recit:02d}-R{rep:02d}")

    arch.log_result(lresults)

    return lresults