def test_ga_iter_benchmark(self):
    """Run the GA in iteration mode with benchmarking on every fjs instance.

    Verifies that the solver records a solution, that the GA agent's
    parameters match what was requested, that the result solution is at
    least as good as every initial solution, that the result population
    contains no duplicates, and that the benchmark output file is produced.
    """
    # parameters
    iterations = 10  # keep this value small
    population_size = 50  # keep this value small
    mutation_probability = 0.8
    selection_size = 5

    for i, fjs_instance in enumerate(fjs_data):
        print(
            f"testing fjs instance {fjs_instance} ({i + 1} of {len(fjs_data)})"
        )
        try:
            data_instance = data.FJSData(fjs_instance)

            # run GA
            solver = Solver(data_instance)
            solver.genetic_algorithm_iter(
                iterations=iterations,
                population_size=population_size,
                mutation_probability=mutation_probability,
                selection_size=selection_size,
                benchmark=True)
        except Exception as e:
            self.fail(
                f'Unexpected exception raised while running GA for {fjs_instance}:'
                + str(e))

        self.assertIsNotNone(solver.solution)
        self.assertIsNotNone(solver.ga_agent)

        # test parameters were set
        self.assertEqual(iterations, solver.ga_agent.iterations)
        self.assertFalse(solver.ga_agent.time_condition)
        self.assertTrue(solver.ga_agent.benchmark)
        self.assertEqual(population_size, solver.ga_agent.population_size)
        self.assertEqual(mutation_probability,
                         solver.ga_agent.mutation_probability)
        self.assertEqual(selection_size, solver.ga_agent.selection_size)
        self.assertEqual(population_size,
                         len(solver.ga_agent.initial_population))
        self.assertEqual(population_size,
                         len(solver.ga_agent.result_population))

        # test that the result solution is better than all the solutions in the initial population
        for initial_sol in solver.ga_agent.initial_population:
            self.assertLessEqual(solver.solution, initial_sol)

        # test that the result population does not have duplicate solutions.
        # An explicit loop replaces the old side-effecting
        # `any(sol in seen or seen.append(sol) ...)` hack; a list (not a set)
        # is used because solutions are compared by equality, not hashed.
        seen = []
        for sol in solver.ga_agent.result_population:
            self.assertNotIn(
                sol, seen,
                "duplicate solution found in result population")
            seen.append(sol)

        # output results
        output_file = tmp_dir / 'fjs_ga_benchmark'
        solver.output_benchmark_results(output_file, auto_open=False)
        self.assertTrue(output_file.exists(),
                        "fjs_ga_benchmark was not produced")
def test_generate_feasible_solution_lpt(self):
    """Build LPT-first solutions for 10 random fjs instances; fail on infeasibility."""
    # Note this test fails on data/given_data
    fjs_data = get_files_with_suffix(project_root / 'data/fjs_data', '.fjs')

    # sample 10 instances (with replacement) rather than testing them all
    for fjs_instance in random.choices(fjs_data, k=10):
        try:
            print(
                f"test_generate_feasible_solution_lpt with fjs data: {fjs_instance}"
            )
            factory = SolutionFactory(data.FJSData(fjs_instance))
            factory.get_n_longest_process_time_first_solution(50)
        except InfeasibleSolutionException:
            self.fail("Infeasible solution was generated")
def test_crossover(self):
    """Exercise the GA crossover operator in both parent orders on random instances.

    Fails the test if any crossover produces an infeasible child.
    """
    fjs_data = get_files_with_suffix(project_root / 'data/fjs_data', '.fjs')
    num_choices = 10
    probability_mutation = 0.5

    for i, fjs_instance in enumerate(random.choices(fjs_data, k=num_choices)):
        print(f"testing GA crossover function for fjs instance {fjs_instance} ({i + 1} of {num_choices})")
        instance_data = data.FJSData(fjs_instance)
        try:
            for _ in range(50):
                parent1 = SolutionFactory(instance_data).get_solution()
                parent2 = SolutionFactory(instance_data).get_solution()
                # cross each parent pair in both orders
                for mom, dad in ((parent1, parent2), (parent2, parent1)):
                    crossover(mom, dad, probability_mutation,
                              instance_data.job_task_index_matrix,
                              instance_data.usable_machines_matrix)
        except InfeasibleSolutionException:
            self.fail("Infeasible child created")
def test_create_fjs_data(self):
    """Parse every fjs instance and check all data attributes are populated."""
    fjs_lst = get_files_with_suffix(project_root / 'data/fjs_data', '.fjs')
    total = len(fjs_lst)

    for i, fjs_instance in enumerate(fjs_lst):
        print(f"testing fjs instance {fjs_instance} ({i + 1} of {total})")
        fjs_data = data.FJSData(fjs_instance)

        # every parsed attribute must be set after construction
        for attr in ('fjs_file_path',
                     'sequence_dependency_matrix',
                     'job_task_index_matrix',
                     'usable_machines_matrix',
                     'task_processing_times_matrix',
                     'total_number_of_jobs',
                     'total_number_of_tasks',
                     'total_number_of_machines',
                     'max_tasks_for_a_job'):
            self.assertIsNotNone(getattr(fjs_data, attr))

        # the job list must not be empty
        self.assertNotEqual([], fjs_data.jobs)
def test_ts_iter_benchmark(self):
    """Run Tabu Search in iteration mode with benchmarking on every fjs instance.

    Checks that a best solution is produced and that the benchmark output
    file is written to the temporary directory.
    """
    # parameters
    iterations = 50  # keep this value small
    num_processes = 1
    tabu_list_size = 10
    neighborhood_size = 25
    neighborhood_wait = 0.1
    probability_change_machine = 0.8

    for i, fjs_instance in enumerate(fjs_data):
        print(
            f"testing fjs instance {fjs_instance} ({i + 1} of {len(fjs_data)})"
        )
        try:
            solver = Solver(data.FJSData(fjs_instance))
            solver.tabu_search_iter(
                iterations,
                num_solutions_per_process=1,
                num_processes=num_processes,
                tabu_list_size=tabu_list_size,
                neighborhood_size=neighborhood_size,
                neighborhood_wait=neighborhood_wait,
                probability_change_machine=probability_change_machine,
                benchmark=True)
        except Exception as e:
            self.fail(
                f'Unexpected exception raised while running TS for {fjs_instance}:'
                + str(e))

        self.assertIsNotNone(solver.solution,
                             "TS should have produced a best solution")

        # output results
        output_file = tmp_dir / 'fjs_ts_benchmark'
        solver.output_benchmark_results(output_file, auto_open=False)
        self.assertTrue(output_file.exists(),
                        "fjs_ts_benchmark was not produced")
def test_converting_fjs_instances(self):
    """Convert each fjs instance to csv and verify the csv round-trip matches.

    Snapshots all data parsed from the fjs file, converts it to csv via
    Data.convert_fjs_to_csv, re-reads the csv, and asserts the two
    representations agree (the task-processing-times matrix is only
    compared by shape — see the TODO below).
    """
    fjs_lst = get_files_with_suffix(project_root / 'data/fjs_data', '.fjs')

    for i, fjs_instance in enumerate(fjs_lst):
        print(
            f"testing fjs instance {fjs_instance} ({i + 1} of {len(fjs_lst)})"
        )
        fjs_data = data.FJSData(fjs_instance)

        # snapshot all of the data that was read in, so the later
        # comparisons are against copies independent of fjs_data
        sequence_dependency_matrix = np.copy(fjs_data.sequence_dependency_matrix)
        job_task_index_matrix = np.copy(fjs_data.job_task_index_matrix)
        usable_machines_matrix = np.copy(fjs_data.usable_machines_matrix)
        task_processing_times_matrix = np.copy(fjs_data.task_processing_times_matrix)
        jobs = fjs_data.jobs[:]
        total_number_of_jobs = fjs_data.total_number_of_jobs
        total_number_of_tasks = fjs_data.total_number_of_tasks
        total_number_of_machines = fjs_data.total_number_of_machines
        max_tasks_for_a_job = fjs_data.max_tasks_for_a_job

        data.Data.convert_fjs_to_csv(fjs_instance, tmp_dir)

        # read in converted csv file
        csv_data = data.CSVData(tmp_dir / 'sequenceDependencyMatrix.csv',
                                tmp_dir / 'machineRunSpeed.csv',
                                tmp_dir / 'jobTasks.csv')

        # the matrices must survive the round trip unchanged
        np.testing.assert_array_equal(
            sequence_dependency_matrix,
            csv_data.sequence_dependency_matrix,
            err_msg=f'sequence dependency matrices are not equal for {fjs_instance}')
        np.testing.assert_array_equal(
            job_task_index_matrix,
            csv_data.job_task_index_matrix,
            err_msg=f'job-task index matrices are not equal for {fjs_instance}')
        np.testing.assert_array_equal(
            usable_machines_matrix,
            csv_data.usable_machines_matrix,
            err_msg=f'usable machines matrices are not equal for {fjs_instance}')

        # TODO task_processing_times_matrix will not always be equal because of the way Data.convert_fjs_to_csv is implemented
        # np.testing.assert_array_equal(task_processing_times_matrix, csv_data.task_processing_times_matrix)
        self.assertEqual(
            task_processing_times_matrix.shape,
            csv_data.task_processing_times_matrix.shape,
            f'task processing times matrices are not same shape for {fjs_instance}')

        # scalar/list fields, compared table-style
        for expected, actual, msg in (
                (jobs, csv_data.jobs,
                 f'jobs lists are not equal for {fjs_instance}'),
                (total_number_of_jobs, csv_data.total_number_of_jobs,
                 f'total number of jobs are not equal for {fjs_instance}'),
                (total_number_of_tasks, csv_data.total_number_of_tasks,
                 f'total number of tasks are not equal for {fjs_instance}'),
                (total_number_of_machines, csv_data.total_number_of_machines,
                 f'total number of machines are not equal for {fjs_instance}'),
                (max_tasks_for_a_job, csv_data.max_tasks_for_a_job,
                 f'max tasks for a job are not equal for {fjs_instance}'),
        ):
            self.assertEqual(expected, actual, msg)