# Perform binary thresholding on the mask to remove any anti-aliasing
# (any nonzero pixel becomes 255; the threshold return code is unused)
_, mask = cv2.threshold(mask, 1, 255, cv2.THRESH_BINARY)

# Denoise the mask
mask = SegmentationNetwork.denoise(mask)

# Invert the mask, since Emma painted the particles black rather than white in the mask layer,
# whereas the neural network uses classification label 0 for "not particle" and 1 for "particle"
mask = 255 - mask

# Open the file containing the RGB channel data, keeping at most the first three channels
channels = mergetiff.rasterFromFile(channelsFile)
if channels.shape[2] > 3:
    channels = channels[:, :, 0:3]
elif channels.shape[2] < 3:
    # NOTE(review): this logs but does not abort, so the merge below will then
    # fail on a shape mismatch — confirm whether Logger.error() raises/exits.
    Logger.error('could not extract RGB channel data!')

# Merge the RGB channels with the modified mask into an RGBA image
shape = (channels.shape[0], channels.shape[1], 4)
merged = np.zeros(shape, dtype=channels.dtype)
merged[:, :, 0:3] = channels
merged[:, :, 3] = mask

# Write the output file (convert RGB to BGR for OpenCV)
if os.path.exists(outfile):
    Logger.warning('overwriting existing file {}'.format(outfile))
cv2.imwrite(outfile, merged[..., [2, 1, 0, 3]])

# Progress output
timer.end()
Logger.success('preprocessing complete ({}).'.format(timer.report()))
class AthenaOpt(object):
    """
    The primary class for AthenaOpt that controls program logic.
    """

    def __init__(self):
        """
        AthenaOpt constructor

        @return: None
        """
        self._configuration = None  # active Configuration for the current run
        self._logger = None         # Logger writing into the output directory
        self._directory = None      # output directory for all run artifacts

    def _create_model(self):
        """
        Instantiate the model named by the configuration.

        Uses a dispatch table instead of a long if/elif chain; the log and
        error messages are identical to the branch-per-model originals.

        @return: The model instance
        @raise AthenaException: If the configured model name is unknown
        """
        models = {
            'ZDT1': ZDT1, 'ZDT2': ZDT2, 'ZDT3': ZDT3, 'ZDT4': ZDT4,
            'ZDT6': ZDT6, 'DTLZ1': DTLZ1, 'DTLZ2': DTLZ2, 'DTLZ3': DTLZ3,
            'TNK': TNK, 'PACKING': PACKING, 'POLONI': POLONI,
        }
        name = self._configuration.get_model()
        if name not in models:
            self._logger.error('Unable to determine model ' + name)
            raise AthenaException('Unable to determine model ' + name)
        model = models[name]()
        self._logger.info('Model ' + name + ' selected')
        return model

    def _create_algorithm(self):
        """
        Instantiate the algorithm named by the configuration.

        @return: The algorithm instance
        @raise AthenaException: If the configured algorithm name is unknown
        """
        algorithms = {'SERIAL': Serial, 'ISLANDS': Islands, 'SPHERES': Spheres}
        name = self._configuration.get_algorithm()
        if name not in algorithms:
            self._logger.error('Unable to determine algorithm ' + name)
            raise AthenaException('Unable to determine algorithm ' + name)
        algorithm = algorithms[name]()
        self._logger.info('Algorithm ' + name + ' selected')
        return algorithm

    def _write_outputs(self, configuration, history, convergence, solution):
        """
        Serialize and write the run artifacts into the output directory.

        Write order matches the original: generations (optional), solution
        (optional), convergence, then the configuration snapshot.

        @param configuration: The optimization configuration object
        @param history: The run history returned by the algorithm
        @param convergence: The convergence metrics returned by the algorithm
        @param solution: The final solution returned by the algorithm
        @return: None
        """
        # Serialize convergence first so a serialization failure aborts
        # before any files are written (same ordering as the original).
        json_convergence = json.dumps(convergence, default=encode_convergence_metrics,
                                      sort_keys=True, indent=2)
        if configuration.get_write_history():
            json_history = history.to_json()
            with open(self._directory + '/generations.out', 'w+') as data_file:
                data_file.write(json_history)
        if configuration.get_write_solution():
            json_solution = json.dumps(solution, default=encode_individual,
                                       sort_keys=True, indent=2)
            with open(self._directory + '/solution.out', 'w+') as soln_file:
                soln_file.write(json_solution)
        with open(self._directory + '/convergence.out', 'w+') as convergence_file:
            convergence_file.write(json_convergence)
        with open(self._directory + '/configuration.config', 'w+') as config_file:
            config_file.write(configuration.to_json())

    def run(self, configuration):
        """
        Run the optimization and place output files in the directory
        specified by the configuration.

        @param configuration: The optimization configuration object
        @type configuration: Configuration
        @return: The run history
        @rtype: History
        @raise AthenaException: If the configured model or algorithm is unknown
        """
        # Parse the configuration and prepare the output directory/logger
        self._configuration = configuration
        self._directory = self._configuration.get_directory()
        if not path.exists(self._directory):
            makedirs(self._directory)
        self._logger = Logger(self._directory + '/' + configuration.get_log_file(),
                              configuration.get_log_level(),
                              configuration.get_console_log_level())
        self._logger.info('Athena optimization run started')

        # Load the model and the algorithm (each raises on an unknown name)
        model = self._create_model()
        algorithm = self._create_algorithm()

        # Run the optimization
        self._logger.debug('Setting the logger to the algorithm')
        algorithm.set_logger(self._logger)
        self._logger.debug('Setting the model to the algorithm')
        algorithm.set_model(model)
        self._logger.info('Starting the algorithm')
        algorithm.parse_configuration(configuration)
        history, convergence, solution = algorithm.run()

        # Persist the run artifacts
        self._write_outputs(configuration, history, convergence, solution)
        self._logger.info('Optimization finished')
        return history