Exemplo n.º 1
0
 def __init__(self, experiment_params):
     """
     Creates the object and eagerly fetches the base model from the ESP
     service described by the experiment parameters.

     :param experiment_params: dict of experiment parameters; must contain
         a "LEAF" section (the optional "reevaluate_elites" flag defaults
         to True) and whatever EspService itself requires
     """
     super().__init__()
     self.reevaluate_elites = experiment_params["LEAF"].get(
         "reevaluate_elites", True)
     esp_service = EspService(experiment_params)
     print("Requesting base model...")
     self.base_model = esp_service.request_base_model()
     # Fixed typo in the log message: was "Based model received."
     print("Base model received.")
    def test_grpc_options(self):
        """
        Make sure gRPC options can be passed to the gRPC service.
        :return: nothing
        """
        # The options themselves are opaque to this test, so the best we
        # can do is verify that construction succeeds both without and
        # with gRPC options present in the experiment params.
        for params in (EXPERIMENT_PARAMS, EXPERIMENT_PARAMS_WITH_GRPC_OPTIONS):
            service = EspService(params)
            self.assertIsNotNone(service)
Exemplo n.º 3
0
def train(experiment_params, evaluator, checkpoint_id=None):
    """
    Run the ESP evolution loop for the configured number of generations.

    :param experiment_params: dict of experiment parameters; must contain
        ["LEAF"]["experiment_id"] and ["evolution"]["nb_generations"]
    :param evaluator: object whose evaluate_population(response) updates
        the metrics on the candidates of a PopulationResponse
    :param checkpoint_id: optional id of a check-pointed population to
        resume from; when falsy, evolution starts from a fresh seed
        generation
    :return: the directory the persistor saved each generation's info to
    """
    esp_service = EspService(experiment_params)
    persistor = EspPersistor(experiment_params, evaluator)
    experiment_id = experiment_params["LEAF"]["experiment_id"]
    timestamp = time.strftime("%Y%m%d-%H%M%S")

    print("Starting training:")
    print("  experiment_id: {}".format(experiment_id))
    print("  checkpoint_id: {}".format(checkpoint_id))
    print("  timestamp: {}".format(timestamp))
    print("  experiment results dir: {}".format(
        persistor.get_persistence_directory()))

    # Starting point: either a brand new 'seed' population created from
    # scratch, or a previously check-pointed one we resume from.
    if not checkpoint_id:
        print("Asking ESP for a seed generation...")
        response = esp_service.get_next_population(prev_response=None)
        print("Seed generation received.")
        first_gen = 1
    else:
        print("Asking ESP for a check-pointed population: {}...".format(
            checkpoint_id))
        response = esp_service.get_previous_population(experiment_id,
                                                       checkpoint_id)
        first_gen = response.generation_count

    last_gen = experiment_params["evolution"]["nb_generations"]
    for generation in range(first_gen, last_gen + 1):
        print(
            "Evaluating the following PopulationResponse for generation {}...:"
            .format(generation))
        esp_service.print_population_response(response)

        # Evaluation updates the metrics on the candidates contained
        # in the response in place.
        evaluator.evaluate_population(response)
        print("Evaluation done.")

        # Save the evaluated population to disk.
        persistor.persist_response(response)
        print("Generation's info persisted to {}".format(
            persistor.get_persistence_directory()))

        # Show the candidates and their scores.
        esp_service.print_candidates(response)

        print("Done with generation {}.".format(generation))
        print("--------\n")

        # Unless this was the final generation, evolve the next one from
        # the population we have just evaluated.
        if generation < last_gen:
            print("Asking ESP for generation {}...:".format(generation + 1))
            response = esp_service.get_next_population(prev_response=response)
    return persistor.get_persistence_directory()
Exemplo n.º 4
0
def train(experiment_params_file, encoded_training_data_csv, output_file,
          batch_size, epochs, verbose):
    """
    Creates a model using the experiment params, trains it using the data and labels,
    and persists it as an h5 file with the output_name.
    :param experiment_params_file: the .json file containing the experiment and network description
    :param encoded_training_data_csv: the .csv file containing the encoded training data and labels
    :param output_file: the name of the file to which to save the trained keras network. Should be an .h5 file.
    :param batch_size: number of samples per gradient update
    :param epochs: the number of times to iterate over the training data arrays
    :param verbose: 0, 1, or 2. Keras verbosity mode.
                    0 = silent, 1 = verbose, 2 = one log line per epoch.
    :return: a trained Keras model
    """
    # Get the experiment params
    with open(experiment_params_file) as json_data:
        experiment_params = json.load(json_data)

    esp_service = EspService(experiment_params)
    keras_model = XDESurrogateModel.train_from_data(esp_service,
                                                    experiment_params,
                                                    encoded_training_data_csv,
                                                    batch_size=batch_size,
                                                    epochs=epochs,
                                                    verbose=verbose)
    keras_model.save(output_file)
    # Fix: the docstring promises a trained Keras model, but the function
    # previously returned None. Returning the model is backward-compatible.
    return keras_model
    def test_success(self, grpc_mock):
        """
        Make sure that the service proceeds when the gRPC call succeeds.
        :param grpc_mock: Injected mock
        """
        service = EspService(EXPERIMENT_PARAMS)

        # Wire the mocked gRPC stub so NextPopulation hands back a
        # placeholder value we can recognize.
        expected = 'test_response'
        population_mock = Mock(return_value=expected)
        grpc_mock.return_value.NextPopulation = population_mock

        actual = service.get_next_population(None)

        # The service must return the stub's value after exactly one call.
        self.assertEqual(expected, actual)
        self.assertEqual(1, population_mock.call_count)
    def test_retry(self, grpc_mock):
        """
        Make sure that the service makes the right number of attempts at the gRPC call, then gives up, and raises
        an exception
        :param grpc_mock: Injected mock
        """
        service = EspService(EXPERIMENT_PARAMS)

        # Every call to the mocked NextPopulation blows up with an
        # RpcError, forcing the retry logic to exhaust all its attempts.
        failing_mock = Mock(side_effect=grpc.RpcError('expected'))
        grpc_mock.return_value.NextPopulation = failing_mock

        self.assertRaises(RetryError, service.get_next_population, None)

        # One gRPC attempt per configured retry — no more, no fewer.
        self.assertEqual(NB_RETRIES, failing_mock.call_count)
Exemplo n.º 7
0
}

# Save experiment parameters

# Persist the in-memory experiment params (built above, outside this view)
# to PREDICTOR_JSON so they can be reloaded in a later cell/run.
# NOTE(review): PREDICTOR_JSON is presumably a path string defined earlier —
# confirm against the rest of the notebook.
with open(PREDICTOR_JSON, 'w') as fp:
    json.dump(experiment_params, fp)
# Bare expression: in a notebook cell this echoes the params for inspection.
experiment_params

# In[8]:

# Load experiment parameters

# Reload the just-saved params from disk, exercising the round trip.
with open(PREDICTOR_JSON) as json_data:
    experiment_params = json.load(json_data)

# Ask the ESP service for the base model described by the params.
esp_service = EspService(experiment_params)
model_to_save = esp_service.request_base_model()
# Compile the model to avoid warnings when loading it
model_to_save.compile(optimizer='adam',
                      loss='binary_crossentropy',
                      metrics=['accuracy'])

# In[9]:


#Function Definitions
def create_input_outputs(encoded_data_sets):
    input_output_sets = []
    for i, encoded_ds in enumerate(encoded_data_sets):
        print("Creating input/output for set #{}...".format(i + 1))
        start = time.time()