def test_regression_case(self):
    """The sampled output distribution for a SISO regression genome must have
    shape (num_targets, n_samples, n_output)."""
    config = create_configuration(filename='/regression-siso.json')
    config.parallel_evaluation = False  # keep the test single-process

    genome = Genome(key=1)
    genome.create_random_genome()

    dataset = get_dataset(config.dataset,
                          train_percentage=config.train_percentage,
                          testing=True)

    n_samples = 3
    network = ComplexStochasticNetwork(genome=genome)
    x, y_true, output_distribution = calculate_prediction_distribution(
        network,
        dataset=dataset,
        problem_type=config.problem_type,
        is_testing=True,
        n_samples=n_samples,
        use_sigmoid=False)

    # One row per target, one column per stochastic sample, one slice per output.
    expected_shape = [len(y_true), n_samples, config.n_output]
    self.assertEqual(list(output_distribution.shape), expected_shape)
def test_write_and_read_geneme(self):
    """Round-trip a random genome through a JSON file and check no attributes are lost.

    NOTE(review): "geneme" in the method name looks like a typo for "genome";
    kept unchanged so the test's public identifier (and any test selection by
    name) is not affected.
    """
    target = self.path + '/genome_test.json'

    original = Genome(key=0)
    original.create_random_genome()
    original.save_genome(target)

    restored = Genome.create_from_file(target)
    # Attribute-count comparison only — does not compare attribute values.
    self.assertEqual(len(original.__dict__), len(restored.__dict__))
def initialize_population(self):
    """Create the initial population of ``self.pop_size`` genomes.

    Genomes are generated randomly unless the configuration names an initial
    genome file, in which case every member is loaded from that file (each
    with its own fresh key). Also records empty ancestry for every new genome.

    :return: dict mapping genome key -> Genome.
    """
    population = {}
    # Invariant across the loop: the seed file (or None) never changes here.
    seed_filename = self.config.initial_genome_filename

    for _ in range(self.pop_size):
        key = next(self.genome_indexer)
        if seed_filename is None:
            genome = Genome(key=key)
            genome.create_random_genome()
        else:
            genome = Genome.create_from_file(filename=seed_filename, key=key)
        population[key] = genome
        self.ancestors[key] = tuple()  # generation zero: no parents

    return population
def test_classification_case(self):
    """Smoke-test the full estimator pipeline on a MISO classification genome:
    estimate, enrich with dispersion quantiles, compute metrics, and verify
    the results come back as a DataFrame."""
    config = create_configuration(filename='/classification-miso.json')

    genome = Genome(key=1)
    genome.create_random_genome()

    n_samples = 5
    estimator = PredictionDistributionEstimatorGenome(genome=genome,
                                                      config=config,
                                                      testing=True,
                                                      n_samples=n_samples)
    # Same fluent pipeline as before, spelled out step by step.
    estimator = estimator.estimate()
    estimator = estimator.enrich_with_dispersion_quantile()
    estimator = estimator.calculate_metrics_by_dispersion_quantile()

    results = estimator.results
    self.assertTrue(isinstance(results, pd.DataFrame))
# Module-level setup for benchmarking genome evaluation on the binary-MNIST config.
from neat.representation_mapping.genome_to_network.complex_stochastic_network import ComplexStochasticNetwork
from neat.utils import timeit
from config_files import create_configuration

config = create_configuration(filename='/mnist_binary.json')
# NOTE(review): `os`, `get_neat_logger`, `get_dataset` and `Genome` are used below
# but not imported in this span — presumably imported earlier in the file; verify.
LOGS_PATH = f'{os.getcwd()}/'
logger = get_neat_logger(path=LOGS_PATH)

# N_SAMPLES = 10
N_PROCESSES = 16  # worker processes for parallel evaluation
N_GENOMES = 100   # size of the random genome population to evaluate

# Pre-build the random genomes at import time so workers only evaluate.
genomes = []
for i in range(N_GENOMES):
    genome = Genome(key=i)
    genome.create_random_genome()
    genomes.append(genome)


def evaluate_genome_parallel(x):
    # Adapter: unpack a single argument tuple so pool.map-style APIs can
    # drive the multi-argument evaluate_genome.
    return evaluate_genome(*x)


def process_initialization(dataset_name, testing):
    # Worker initializer: load and generate the dataset once per process,
    # stored as a process-global so repeated evaluations do not reload it.
    global dataset
    dataset = get_dataset(dataset_name, testing=testing)
    dataset.generate_data()


# Definition continues past this chunk; signature preserved verbatim.
@timeit
def evaluate_genome(genome: Genome, loss, beta_type, problem_type, batch_size=10000, n_samples=10, is_gpu=False):