# Example #1
import numpy as np
from mlrose import mlrose

# Example #1: Max-K-Color on a 5-node graph with two available colours.
# Edge list of the graph to be coloured.
graph_edges = [(0, 1), (1, 2), (0, 2), (1, 3), (2, 3), (3, 4)]
kcolor_fitness = mlrose.MaxKColor(graph_edges)

# Minimisation problem: fewer same-coloured adjacent pairs is better.
problem = mlrose.DiscreteOpt(
    length=5, fitness_fn=kcolor_fitness, maximize=False, max_val=2)

# Random Hill Climbing runner: iteration budgets are powers of two up to
# 2**13; results are written under the k_color_problem output directory.
rhc_output_dir = ("/Users/matthieudivet/Desktop/GaTech/Classes/ML/Assignments/"
                  "Randomized_optimization/k_color_problem")
rhc = mlrose.RHCRunner(problem=problem,
                       experiment_name="RHC_final",
                       output_directory=rhc_output_dir,
                       seed=None,
                       iteration_list=2 ** np.arange(14),
                       max_attempts=1000,
                       restart_list=[0])
rhc_run_stats, rhc_run_curves = rhc.run()

# Simulated Annealing runner over the same problem and iteration schedule.
# FIX: the runner was previously constructed but never executed — unlike
# the RHC runner, sa.run() was missing, so no SA results were produced.
sa = mlrose.SARunner(
    problem=problem,
    experiment_name="SA_final",
    output_directory=
    "/Users/matthieudivet/Desktop/GaTech/Classes/ML/Assignments/Randomized_optimization/k_color_problem",
    seed=None,
    iteration_list=2**np.arange(14),
    max_attempts=1000,
    temperature_list=[250],
    decay_list=[mlrose.ExpDecay])
sa_run_stats, sa_run_curves = sa.run()
from mlrose import mlrose
import numpy as np
import matplotlib.pyplot as plt
import time
from util import plot_fitness_curve

# Example #2: Continuous Peaks on a 1000-bit string, solved with a GA at
# several max-iteration budgets.
np.random.seed(seed=0)

# Continuous Peaks fitness with a 10% run-length threshold.
fitness_fn = mlrose.ContinuousPeaks(t_pct=0.1)

# Random initial 1000-bit state with entries drawn from {0, 1}.
# (Seed and sampling call are kept exactly as before for reproducibility.)
state = np.random.randint(low=0, high=2, size=1000)

# Maximisation problem over binary strings of the same length as `state`.
problem_fit = mlrose.DiscreteOpt(
    length=len(state), fitness_fn=fitness_fn, maximize=True, max_val=2)

# Iteration budgets to sweep.
iterations = [1, 10, 100, 1000, 10000]
# ________________________________________________________________________________________________________________________
times_ga = []  # wall-clock seconds of each GA run, one per iteration budget
fitns_ga = []  # best fitness reached by each GA run
# Solve the problem with the genetic algorithm at each iteration budget.
# FIX: the original recorded `start` but never computed the elapsed time,
# and neither accumulator list was ever populated.
for i in iterations:
    start = time.time()
    best_state, best_fitness, c = mlrose.genetic_alg(problem_fit,
                                                     pop_size=200,
                                                     mutation_prob=0.1,
                                                     curve=True,
                                                     max_iters=i,
                                                     random_state=3)
    times_ga.append(time.time() - start)
    fitns_ga.append(best_fitness)
# Example #3
import pickle

ps = 32  # largest FlipFlop problem size to test
mimic_maxs = []  # per-repeat lists of best fitnesses, one entry per size
mimic_its = []   # per-repeat lists of iteration counts taken by MIMIC
# Run MIMIC on FlipFlop for sizes 2..ps, repeated 5 times.
# FIX: removed `istate` and `schedule`, which were constructed but never
# passed to mlrose.mimic (it takes neither an initial state nor a decay
# schedule), and populated the accumulator lists, which previously stayed
# empty for the whole run.
for i in range(5):
    final_results_maxs = []
    final_results_it = []
    for p in range(2, ps + 1):
        print(p)

        # FlipFlop fitness: counts neighbouring bit pairs that differ.
        fitness = mlrose.FlipFlop()

        problem = mlrose.DiscreteOpt(length=p,
                                     fitness_fn=fitness,
                                     maximize=True,
                                     max_val=2)

        best_state, best_fitness, fitness_curve = mlrose.mimic(
            problem,
            max_attempts=500,
            pop_size=500,
            keep_pct=0.3,
            max_iters=1000,
            curve=True)

        # Record this run's result (previously discarded).
        final_results_maxs.append(best_fitness)
        final_results_it.append(len(fitness_curve))

    # Collect this repeat's results (previously discarded).
    mimic_maxs.append(final_results_maxs)
    mimic_its.append(final_results_it)
# Example #4
import numpy as np
import mlrose.mlrose as mlrose
import pandas as pd
import pickle

ps = 16  # board size (number of queens)
results = []  # (pop_size, best_fitness) per MIMIC run
# Sweep MIMIC population sizes on the N-Queens problem.
# FIX: `results` was declared but never appended to, and `istate` /
# `schedule` were built but never used (mlrose.mimic accepts neither an
# initial state nor a decay schedule) — both removed.
for i in range(10, 1000, 25):
    n_queen = ps
    # Queens fitness: number of attacking queen pairs (minimised below).
    fitness_1 = mlrose.Queens()

    problem = mlrose.DiscreteOpt(length=n_queen,
                                 fitness_fn=fitness_1,
                                 maximize=False,
                                 max_val=n_queen)

    # MIMIC with population size i; fixed seed for reproducibility.
    best_state, best_fitness, fitness_curve = mlrose.mimic(problem,
                                                           max_attempts=500,
                                                           pop_size=i,
                                                           keep_pct=0.3,
                                                           max_iters=500,
                                                           random_state=1,
                                                           curve=True)

    # Record the result — previously discarded.
    results.append((i, best_fitness))

    print('The best state found is: ', best_state)
    print('The fitness at the best state is: ', best_fitness)