def load_training_data(params, config):
    """Load and return the training data."""
    # Load data
    if params['is_conditional']:
        # greg: What does this error mean??
        # Conditional training (data with labels) is not implemented, so the
        # function refuses to continue; in the unconditional case no labels
        # are used.
        raise ValueError("Not supported yet.")
    else:
        labels = None
    LOGGER.info("Loading training data.")
    data = load_data(config['data_source'], config['data_filename'])
    LOGGER.info("Training data size: %d", len(data))

    # Build dataset
    LOGGER.info("Building dataset.")
    dataset = get_dataset(
        data, labels, config['batch_size'], params['data_shape'],
        config['use_random_transpose'], config['n_jobs'])

    # Create iterator
    # greg: THIS NEVER HAPPENS, since is_conditional already raises the
    #       "Not supported yet." ValueError at the top of load_training_data.
    # greg: What does make_one_shot_iterator().get_next() do? (See the sketch
    #       after this function.)
    if params['is_conditional']:
        train_x, train_y = dataset.make_one_shot_iterator().get_next()
    else:
        train_x, train_y = dataset.make_one_shot_iterator().get_next(), None
    return train_x, train_y
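# greg asks above what make_one_shot_iterator().get_next() does. Below is a
# minimal standalone sketch, assuming the TensorFlow 1.x tf.data API that this
# code targets (tf.data.Dataset no longer exposes make_one_shot_iterator() in
# TF 2.x); the toy data is a placeholder, not the project's dataset.
def _demo_one_shot_iterator():
    """Illustrative only: show what get_next() yields on a toy dataset."""
    import numpy as np
    import tensorflow as tf

    # Ten samples of shape (3,), batched in groups of four.
    toy = tf.data.Dataset.from_tensor_slices(
        np.arange(30, dtype=np.float32).reshape(10, 3)).batch(4)

    # make_one_shot_iterator() builds an iterator that can be consumed exactly
    # once and needs no explicit initialization; get_next() returns the tensor
    # that evaluates to the next batch each time it is run in a session.
    next_batch = toy.make_one_shot_iterator().get_next()
    with tf.Session() as sess:
        print(sess.run(next_batch))  # first batch: rows 0-3
        print(sess.run(next_batch))  # second batch: rows 4-7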
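# A hypothetical call site for load_training_data, for reference. The key
# names mirror exactly what the function reads from `params` and `config`;
# every concrete value and filename below is a placeholder, not taken from the
# project's actual configuration.
def _example_usage():
    """Illustrative only: build placeholder params/config and load data."""
    params = {
        'is_conditional': False,       # True currently raises ValueError
        'data_shape': (4, 48, 84, 5),  # placeholder shape
    }
    config = {
        'data_source': 'npy',            # placeholder source identifier
        'data_filename': 'train_x.npy',  # placeholder path
        'batch_size': 64,
        'use_random_transpose': True,
        'n_jobs': 4,
    }
    train_x, train_y = load_training_data(params, config)
    # train_x is the next-batch tensor from the one-shot iterator;
    # train_y is None in the unconditional case.
    return train_x, train_y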