Example #1
0
    def __init__(self, config_file_name, seed=0, verbose=False,
                 epsilon_start=0.3, epsilon_end=0.05, epsilon_decay=0.99999):
        """Create the black-box environment and a DQN agent driving it.

        Args:
            config_file_name: path to the agent's network configuration file,
                forwarded verbatim to agent_dqn.DQNAgent.
            seed: RNG seed forwarded to blackbox.EnvBlackBox (default 0).
            verbose: when True, print environment info at construction time.
            epsilon_start: third positional argument to DQNAgent
                (default 0.3) -- presumably the initial exploration rate;
                TODO confirm against agent_dqn.DQNAgent's signature.
            epsilon_end: fourth positional argument (default 0.05).
            epsilon_decay: fifth positional argument (default 0.99999).
        """
        self.verbose = verbose
        self.env = blackbox.EnvBlackBox(seed)

        # print environment info
        if verbose:
            self.env.print_info()

        # init DQN agent; the hyperparameters were previously hard-coded here
        # and now default to those same values, so existing callers see
        # identical behavior
        self.agent = agent_dqn.DQNAgent(self.env, config_file_name,
                                        epsilon_start, epsilon_end,
                                        epsilon_decay)

        # iteration counts, presumably consumed by training/testing loops
        # defined elsewhere in the class
        self.training_iterations = 100000
        self.testing_iterations = 10000
Example #2
0
    def __init__(self, seed=0, verbose=False,
                 network_config_file_name=(
                     "networks/black_box_network/net_0_parameters.json"),
                 epsilon_start=0.3, epsilon_end=0.05, epsilon_decay=0.99999):
        """Create the black-box environment and a DQN agent driving it.

        Args:
            seed: RNG seed forwarded to blackbox.EnvBlackBox (default 0).
            verbose: when True, print environment info at construction time.
            network_config_file_name: path to the agent's network parameters;
                defaults to the path that was previously hard-coded here.
            epsilon_start: third positional argument to dqn.DQNAgent
                (default 0.3) -- presumably the initial exploration rate;
                TODO confirm against dqn.DQNAgent's signature.
            epsilon_end: fourth positional argument (default 0.05).
            epsilon_decay: fifth positional argument (default 0.99999).
        """
        self.verbose = verbose
        self.env = blackbox.EnvBlackBox(seed)

        # print environment info
        if verbose:
            self.env.print_info()

        # init DQN agent; the network path and hyperparameters were
        # previously hard-coded and now default to those same values,
        # so existing callers see identical behavior
        self.agent = dqn.DQNAgent(self.env, network_config_file_name,
                                  epsilon_start, epsilon_end, epsilon_decay)

        # iteration counts, presumably consumed by training/testing loops
        # defined elsewhere in the class
        self.training_iterations = 100000
        self.testing_iterations = 10000
Example #3
0
    def __init__(self,
                 bot_file_name,
                 training_iterations,
                 testing_iterations,
                 seed=0,
                 verbose=False,
                 render=False,
                 epsilon_start=0.3,
                 epsilon_end=0.05,
                 epsilon_decay=0.99999):
        """Create the black-box environment and load a DQN agent for it.

        Args:
            bot_file_name: path to the agent file, forwarded verbatim to
                libs.libs_agent.agent_dqn.DQNAgent.
            training_iterations: number of training iterations to store.
            testing_iterations: number of testing iterations to store.
            seed: RNG seed forwarded to blackbox.EnvBlackBox (default 0).
            verbose: when True, print environment info and the agent file
                being loaded.
            render: stored flag; rendering itself happens elsewhere.
            epsilon_start: third positional argument to DQNAgent
                (default 0.3) -- presumably the initial exploration rate;
                TODO confirm against DQNAgent's signature.
            epsilon_end: fourth positional argument (default 0.05).
            epsilon_decay: fifth positional argument (default 0.99999).
        """
        self.verbose = verbose
        self.render = render
        self.env = blackbox.EnvBlackBox(seed)

        # print environment info
        if verbose:
            self.env.print_info()
            print("loading agent ", bot_file_name)

        # init DQN agent; the hyperparameters were previously hard-coded here
        # and now default to those same values, so existing callers see
        # identical behavior
        self.agent = libs.libs_agent.agent_dqn.DQNAgent(
            self.env, bot_file_name, epsilon_start, epsilon_end,
            epsilon_decay)

        # iteration counts, presumably consumed by training/testing loops
        # defined elsewhere in the class
        self.training_iterations = training_iterations
        self.testing_iterations = testing_iterations
Example #4
0
import sys

sys.path.append("..")  # Adds higher directory to python modules path.

import libs.libs_env.blackbox.env_black_box as env_black_box
import libs.libs_agent.agent_dqn

# init environment; 4 is the RNG seed passed to EnvBlackBox -- presumably a
# seed, matching the `seed` parameter used by the other examples; confirm
# against env_black_box.EnvBlackBox's signature
env = env_black_box.EnvBlackBox(4)

# print environment info
env.print_info()

# DQN hyperparameters forwarded positionally to DQNAgent below
gamma = 0.95                # presumably the discount factor -- TODO confirm
replay_buffer_size = 256    # presumably experience-replay capacity
epsilon_training = 1.0      # exploration rate during training
epsilon_testing = 0.05      # exploration rate during testing
epsilon_decay = 0.9999      # per-step decay applied to epsilon

# init DQN agent; NOTE(review): this call passes 7 arguments while other
# snippets pass 5 -- the DQNAgent signatures apparently differ between
# library versions; verify against the imported agent_dqn module
agent = libs.libs_agent.agent_dqn.DQNAgent(
    env, "networks/black_box_network/network_config.json", gamma,
    replay_buffer_size, epsilon_training, epsilon_testing, epsilon_decay)

# process training: total number of agent.main() steps run by the loop below
training_iterations = 100000

for i in range(0, training_iterations):
    agent.main()

    if env.get_iterations() % 256 == 0:
        print(" iterations = ", env.get_iterations(), " score = ",