def write_results(results, cfg):
    # Write out the data
    write_scored_orientations(results, cfg)

    # grab working directory from config
    wdir = cfg.working_dir

    if not os.path.exists(cfg.analysis_dir):
        os.makedirs(cfg.analysis_dir)

    qbar_filename = os.path.join(
        wdir,
        'accepted_orientations_' + cfg.analysis_id + '.dat'
    )
    np.savetxt(qbar_filename, results['qbar'].T,
               fmt='%.18e', delimiter='\t')

    # ??? do we want to do this by default?
    gw = instrument.GrainDataWriter(
        os.path.join(cfg.analysis_dir, 'grains.out')
    )
    for gid, q in enumerate(results['qbar'].T):
        phi = 2*np.arccos(q[0])
        n = xfcapi.unitRowVector(q[1:])
        grain_params = np.hstack([phi*n, const.zeros_3, const.identity_6x1])
        gw.dump_grain(gid, 1., 0., grain_params)
    gw.close()

def generate_grains_table(qbar):
    num_grains = qbar.shape[1]
    grains_table = np.empty((num_grains, 21))
    gw = instrument.GrainDataWriter(array=grains_table)
    for i, q in enumerate(qbar.T):
        phi = 2 * np.arccos(q[0])
        n = xfcapi.unitRowVector(q[1:])
        grain_params = np.hstack(
            [phi * n, constants.zeros_3, constants.identity_6x1])
        gw.dump_grain(i, 1, 0, grain_params)
    gw.close()

    return grains_table

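# -----------------------------------------------------------------------------
# NOTE: the loop bodies above all perform the same unit-quaternion ->
# exponential-map conversion: for q = (q0, q1, q2, q3) the rotation angle is
# phi = 2*arccos(q0), the axis is the normalized vector part, and phi*n is
# stored alongside a zero centroid and an identity inverse stretch. Below is a
# numpy-only sketch of that conversion; np.linalg.norm stands in for
# xfcapi.unitRowVector, and the explicit zeros/identity arrays stand in for
# the constants module (an assumption, not the hexrd implementation).


def quat_to_grain_params(q):
    # assumes q has a nonzero vector part (i.e. not the identity rotation)
    q = np.array(q, dtype=float)
    q /= np.linalg.norm(q)                            # enforce unit norm
    phi = 2.0 * np.arccos(np.clip(q[0], -1.0, 1.0))   # rotation angle
    n = q[1:] / np.linalg.norm(q[1:])                 # rotation axis
    zeros_3 = np.zeros(3)                             # centroid at origin
    identity_6x1 = np.r_[1., 1., 1., 0., 0., 0.]      # identity inv. stretch
    return np.hstack([phi * n, zeros_3, identity_6x1])


# example: a 90 degree rotation about z gives expmap ~ [0, 0, pi/2]
# quat_to_grain_params([np.cos(np.pi/4), 0., 0., np.sin(np.pi/4)])
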
def run_fit_grains(self, config):
    self.fit_grains_results = None
    min_samples, mean_rpg = create_clustering_parameters(
        config, self.ome_maps)

    # Add fit_grains config
    dialog_config = HexrdConfig().indexing_config['fit_grains']
    config.set('fitgrains:npdiv', dialog_config['npdiv'])
    config.set('fitgrains:refit', dialog_config['refit'])
    config.set('fitgrains:threshold', dialog_config['threshold'])
    config.set('fitgrains:tth_max', dialog_config['tth_max'])
    config.set('fitgrains:tolerance:tth', dialog_config['tth_tolerances'])
    config.set('fitgrains:tolerance:eta', dialog_config['eta_tolerances'])
    config.set('fitgrains:tolerance:omega',
               dialog_config['omega_tolerances'])

    kwargs = {
        'compl': self.completeness,
        'qfib': self.qfib,
        'qsym': config.material.plane_data.getQSym(),
        'cfg': config,
        'min_samples': min_samples,
        'compl_thresh': config.find_orientations.clustering.completeness,
        'radius': config.find_orientations.clustering.radius
    }
    self.update_progress_text('Running clustering')
    qbar, cl = run_cluster(**kwargs)

    # Generate grains table
    num_grains = qbar.shape[1]
    if num_grains == 0:
        print('Fit Grains Complete - no grains were found')
        return

    shape = (num_grains, 21)
    grains_table = np.empty(shape)
    gw = instrument.GrainDataWriter(array=grains_table)
    for gid, q in enumerate(qbar.T):
        phi = 2 * np.arccos(q[0])
        n = xfcapi.unitRowVector(q[1:])
        grain_params = np.hstack(
            [phi * n, const.zeros_3, const.identity_6x1])
        gw.dump_grain(gid, 1., 0., grain_params)
    gw.close()

    self.update_progress_text(
        f'Found {num_grains} grains. Running fit optimization.')

    self.fit_grains_results = fit_grains(config, grains_table,
                                         write_spots_files=False)
    print('Fit Grains Complete')

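# -----------------------------------------------------------------------------
# NOTE: the (num_grains, 21) tables allocated above match the conventional
# grains.out record. The column names below are our reading of that layout
# and are an assumption, not taken from this file.

GRAINS_TABLE_COLUMNS = (
    ['grain_id', 'completeness', 'chi2']        # bookkeeping and fit quality
    + ['expmap_c%d' % i for i in range(3)]      # orientation (exponential map)
    + ['t_vec_c%d' % i for i in range(3)]       # grain centroid
    + ['inv(V)_s%d' % i for i in range(6)]      # inverse stretch, Mandel-Voigt
    + ['ln(V)_s%d' % i for i in range(6)]       # log elastic strain
)
assert len(GRAINS_TABLE_COLUMNS) == 21
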
def write_results(results, cfg):
    # Write out the data
    np.savez_compressed('_'.join(['scored_orientations', cfg.analysis_id]),
                        **results['scored_orientations'])

    if not os.path.exists(cfg.analysis_dir):
        os.makedirs(cfg.analysis_dir)

    qbar_filename = 'accepted_orientations_' + cfg.analysis_id + '.dat'
    np.savetxt(qbar_filename, results['qbar'].T,
               fmt='%.18e', delimiter='\t')

    gw = instrument.GrainDataWriter(
        os.path.join(cfg.analysis_dir, 'grains.out'))
    for gid, q in enumerate(results['qbar'].T):
        phi = 2 * np.arccos(q[0])
        n = xfcapi.unitRowVector(q[1:])
        grain_params = np.hstack([phi * n, const.zeros_3, const.identity_6x1])
        gw.dump_grain(gid, 1., 0., grain_params)
    gw.close()

def run_fit_grains(self, config):
    min_samples, mean_rpg = create_clustering_parameters(
        config, self.ome_maps)

    kwargs = {
        'compl': self.completeness,
        'qfib': self.qfib,
        'qsym': config.material.plane_data.getQSym(),
        'cfg': config,
        'min_samples': min_samples,
        'compl_thresh': config.find_orientations.clustering.completeness,
        'radius': config.find_orientations.clustering.radius
    }
    self.update_progress_text('Running clustering')
    qbar, cl = run_cluster(**kwargs)

    # Generate grains table
    num_grains = qbar.shape[1]
    if num_grains == 0:
        QMessageBox.warning(self.parent, 'No Grains',
                            'Clustering found no grains')
        return

    shape = (num_grains, 21)
    grains_table = np.empty(shape)
    gw = instrument.GrainDataWriter(array=grains_table)
    for gid, q in enumerate(qbar.T):
        phi = 2 * np.arccos(q[0])
        n = xfcapi.unitRowVector(q[1:])
        grain_params = np.hstack(
            [phi * n, const.zeros_3, const.identity_6x1])
        gw.dump_grain(gid, 1., 0., grain_params)
    gw.close()

    self.update_progress_text(
        f'Found {num_grains} grains. Running fit optimization.')

    self.fit_grains_results = fit_grains(config, grains_table,
                                         write_spots_files=False)
    print('Fit Grains Complete')

def execute(args, parser):
    # load the configuration settings
    cfgs = config.open(args.yml)

    # configure logging to the console:
    log_level = logging.DEBUG if args.debug else logging.INFO
    if args.quiet:
        log_level = logging.ERROR
    logger = logging.getLogger('hexrd')
    logger.setLevel(log_level)
    ch = logging.StreamHandler()
    ch.setLevel(logging.CRITICAL if args.quiet else log_level)
    cf = logging.Formatter('%(asctime)s - %(message)s', '%y-%m-%d %H:%M:%S')
    ch.setFormatter(cf)
    logger.addHandler(ch)

    # if find-orientations has not already been run, do so:
    quats_f = os.path.join(
        cfgs[0].working_dir,
        'accepted_orientations_%s.dat' % cfgs[0].analysis_id
    )
    if os.path.exists(quats_f):
        try:
            qbar = np.loadtxt(quats_f).T
        except IOError:
            raise RuntimeError(
                "error loading indexing results '%s'" % quats_f)
    else:
        logger.info("Missing %s, running find-orientations", quats_f)
        logger.removeHandler(ch)
        from hexrd.findorientations import find_orientations
        results = find_orientations(cfgs[0])
        qbar = results['qbar']
        logger.addHandler(ch)

    logger.info('=== begin fit-grains ===')

    clobber = args.force or args.clean
    for cfg in cfgs:
        # prepare the analysis directory
        if os.path.exists(cfg.analysis_dir) and not clobber:
            logger.error(
                'Analysis "%s" at %s already exists.'
                ' Change yml file or specify "force"',
                cfg.analysis_name, cfg.analysis_dir
            )
            sys.exit()

        # make output directories
        instr = cfg.instrument.hedm
        if not os.path.exists(cfg.analysis_dir):
            os.makedirs(cfg.analysis_dir)
            for det_key in instr.detectors:
                os.mkdir(os.path.join(cfg.analysis_dir, det_key))
        else:
            # make sure panel dirs exist under analysis dir
            for det_key in instr.detectors:
                if not os.path.exists(os.path.join(cfg.analysis_dir, det_key)):
                    os.mkdir(os.path.join(cfg.analysis_dir, det_key))

        logger.info('*** begin analysis "%s" ***', cfg.analysis_name)

        # configure logging to file for this particular analysis
        logfile = os.path.join(cfg.working_dir, cfg.analysis_name,
                               'fit-grains.log')
        fh = logging.FileHandler(logfile, mode='w')
        fh.setLevel(log_level)
        ff = logging.Formatter('%(asctime)s - %(name)s - %(message)s',
                               '%m-%d %H:%M:%S')
        fh.setFormatter(ff)
        logger.info("logging to %s", logfile)
        logger.addHandler(fh)

        if args.profile:
            import cProfile as profile
            import pstats
            from io import StringIO
            pr = profile.Profile()
            pr.enable()

        grains_filename = os.path.join(cfg.analysis_dir, 'grains.out')

        # some conditions for arg handling
        existing_analysis = os.path.exists(grains_filename)
        new_with_estimate = not existing_analysis \
            and cfg.fit_grains.estimate is not None
        new_without_estimate = not existing_analysis \
            and cfg.fit_grains.estimate is None
        force_with_estimate = args.force \
            and cfg.fit_grains.estimate is not None
        force_without_estimate = args.force \
            and cfg.fit_grains.estimate is None

        # handle args
        if args.clean or force_without_estimate or new_without_estimate:
            # need accepted orientations from indexing in this case
            if args.clean:
                logger.info(
                    "'clean' specified; ignoring estimate and using default")
            elif force_without_estimate:
                logger.info(
                    "'force' option specified, but no initial estimate; "
                    "using default")
            try:
                gw = instrument.GrainDataWriter(grains_filename)
                for i_g, q in enumerate(qbar.T):
                    phi = 2 * np.arccos(q[0])
                    n = xfcapi.unitRowVector(q[1:])
                    grain_params = np.hstack(
                        [phi * n, cnst.zeros_3, cnst.identity_6x1])
                    gw.dump_grain(int(i_g), 1., 0., grain_params)
                gw.close()
            except IOError:
                raise RuntimeError(
                    "indexing results '%s' not found!"
                    % ('accepted_orientations_' + cfg.analysis_id + '.dat'))
        elif force_with_estimate or new_with_estimate:
            grains_filename = cfg.fit_grains.estimate
        elif existing_analysis and not clobber:
            raise RuntimeError(
                "fit results '%s' exist, " % grains_filename
                + "but --clean or --force options not specified")

        grains_table = np.loadtxt(grains_filename, ndmin=2)

        # process the data
        gid_list = None
        if args.grains is not None:
            gid_list = [int(i) for i in args.grains.split(',')]

        cfg.fit_grains.qbar = qbar
        fit_results = fit_grains(
            cfg,
            grains_table,
            show_progress=not args.quiet,
            ids_to_refine=gid_list,
        )

        if args.profile:
            pr.disable()
            s = StringIO()
            ps = pstats.Stats(pr, stream=s).sort_stats('cumulative')
            ps.print_stats(50)
            logger.info('%s', s.getvalue())

        # stop logging for this particular analysis
        fh.flush()
        fh.close()
        logger.removeHandler(fh)
        logger.info('*** end analysis "%s" ***', cfg.analysis_name)

        write_results(fit_results, cfg, grains_filename)

    logger.info('=== end fit-grains ===')

    # stop logging to the console
    ch.flush()
    ch.close()
    logger.removeHandler(ch)

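# -----------------------------------------------------------------------------
# NOTE: a minimal programmatic driver mirroring the front half of execute()
# above, for use without argparse. The helper name is hypothetical;
# config.open() and find_orientations() are used exactly as in execute().


def load_config_and_orientations(yml_file):
    # load the first config block and recover qbar, running
    # find-orientations only if the accepted-orientations file is missing
    from hexrd import config
    from hexrd.findorientations import find_orientations

    cfg = config.open(yml_file)[0]
    quats_f = os.path.join(
        cfg.working_dir,
        'accepted_orientations_%s.dat' % cfg.analysis_id
    )
    if os.path.exists(quats_f):
        qbar = np.loadtxt(quats_f).T
    else:
        qbar = find_orientations(cfg)['qbar']
    return cfg, qbar
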
def fit_grains(cfg, force=False, clean=False, show_progress=False,
               ids_to_refine=None):
    """
    Performs optimization of grain parameters.

    Operates on a single HEDM config block.
    """
    grains_filename = os.path.join(cfg.analysis_dir, 'grains.out')

    # grab imageseries dict
    imsd = cfg.image_series

    # grab instrument
    instr = cfg.instrument.hedm

    # process plane data
    plane_data = cfg.material.plane_data
    tth_max = cfg.fit_grains.tth_max
    if isinstance(tth_max, bool):
        if tth_max:
            max_tth = instrument.max_tth(instr)
            plane_data.tThMax = max_tth
            logger.info("\tsetting the maximum 2theta to instrument"
                        " maximum: %.2f degrees", np.degrees(max_tth))
        else:
            logger.info("\tnot adjusting exclusions in planeData")
    else:
        # a value for tth max has been specified
        plane_data.exclusions = None
        plane_data.tThMax = np.radians(tth_max)
        logger.info("\tsetting the maximum 2theta to %.2f degrees", tth_max)

    # make output directories
    if not os.path.exists(cfg.analysis_dir):
        os.mkdir(cfg.analysis_dir)
        for det_key in instr.detectors:
            os.mkdir(os.path.join(cfg.analysis_dir, det_key))
    else:
        # make sure panel dirs exist under analysis dir
        for det_key in instr.detectors:
            if not os.path.exists(os.path.join(cfg.analysis_dir, det_key)):
                os.mkdir(os.path.join(cfg.analysis_dir, det_key))

    # grab eta ranges and ome_period
    eta_ranges = np.radians(cfg.find_orientations.eta.range)

    # handle omega period
    # !!! we assume all detector ims have the same ome ranges, so any will do!
    oims = next(iter(imsd.values()))
    ome_period = np.radians(oims.omega[0, 0] + np.r_[0., 360.])

    # number of processes
    ncpus = cfg.multiprocessing

    # threshold for fitting
    threshold = cfg.fit_grains.threshold

    # some conditions for arg handling
    existing_analysis = os.path.exists(grains_filename)
    new_with_estimate = not existing_analysis \
        and cfg.fit_grains.estimate is not None
    new_without_estimate = not existing_analysis \
        and cfg.fit_grains.estimate is None
    force_with_estimate = force and cfg.fit_grains.estimate is not None
    force_without_estimate = force and cfg.fit_grains.estimate is None

    # handle args
    if clean or force_without_estimate or new_without_estimate:
        # need accepted orientations from indexing in this case
        if clean:
            logger.info(
                "'clean' specified; ignoring estimate and using default")
        elif force_without_estimate:
            logger.info(
                "'force' option specified, but no initial estimate; "
                "using default")
        try:
            qbar = np.loadtxt(
                'accepted_orientations_' + cfg.analysis_id + '.dat',
                ndmin=2).T

            gw = instrument.GrainDataWriter(grains_filename)
            for i_g, q in enumerate(qbar.T):
                phi = 2*np.arccos(q[0])
                n = xfcapi.unitRowVector(q[1:])
                grain_params = np.hstack(
                    [phi*n, cnst.zeros_3, cnst.identity_6x1]
                )
                gw.dump_grain(int(i_g), 1., 0., grain_params)
            gw.close()
        except IOError:
            raise RuntimeError(
                "indexing results '%s' not found!"
                % ('accepted_orientations_' + cfg.analysis_id + '.dat'))
    elif force_with_estimate or new_with_estimate:
        grains_filename = cfg.fit_grains.estimate
    elif existing_analysis and not (clean or force):
        raise RuntimeError(
            "fit results '%s' exist, " % grains_filename
            + "but --clean or --force options not specified")

    # load grains table
    grains_table = np.loadtxt(grains_filename, ndmin=2)
    if ids_to_refine is not None:
        grains_table = np.atleast_2d(grains_table[ids_to_refine, :])
    spots_filename = "spots_%05d.out"
    params = dict(
        grains_table=grains_table,
        plane_data=plane_data,
        instrument=instr,
        imgser_dict=imsd,
        tth_tol=cfg.fit_grains.tolerance.tth,
        eta_tol=cfg.fit_grains.tolerance.eta,
        ome_tol=cfg.fit_grains.tolerance.omega,
        npdiv=cfg.fit_grains.npdiv,
        refit=cfg.fit_grains.refit,
        threshold=threshold,
        eta_ranges=eta_ranges,
        ome_period=ome_period,
        analysis_dirname=cfg.analysis_dir,
        spots_filename=spots_filename)

    # =========================================================================
    # EXECUTE MP FIT
    # =========================================================================

    # DO FIT!
    if len(grains_table) == 1 or ncpus == 1:
        logger.info("\tstarting serial fit")
        start = timeit.default_timer()
        fit_grain_FF_init(params)
        fit_results = list(
            map(fit_grain_FF_reduced,
                np.array(grains_table[:, 0], dtype=int))
        )
        fit_grain_FF_cleanup()
        elapsed = timeit.default_timer() - start
    else:
        nproc = min(ncpus, len(grains_table))
        chunksize = max(1, len(grains_table)//ncpus)
        logger.info("\tstarting fit on %d processes with chunksize %d",
                    nproc, chunksize)
        start = timeit.default_timer()
        pool = multiprocessing.Pool(nproc, fit_grain_FF_init, (params, ))
        fit_results = pool.map(
            fit_grain_FF_reduced,
            np.array(grains_table[:, 0], dtype=int),
            chunksize=chunksize
        )
        pool.close()
        pool.join()
        elapsed = timeit.default_timer() - start
    logger.info("fitting took %f seconds", elapsed)

    # =========================================================================
    # WRITE OUTPUT
    # =========================================================================

    gw = instrument.GrainDataWriter(
        os.path.join(cfg.analysis_dir, 'grains.out')
    )
    for fit_result in fit_results:
        gw.dump_grain(*fit_result)
    gw.close()
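
# -----------------------------------------------------------------------------
# NOTE: the parallel branch above uses the standard multiprocessing
# Pool-initializer pattern: fit_grain_FF_init installs the shared params dict
# once per worker, and fit_grain_FF_reduced is then mapped over grain IDs.
# Below is a self-contained sketch of that pattern; _init and _work are
# illustrative stand-ins, not the hexrd implementations.

_shared_params = None  # per-worker global, set once by the initializer


def _init(params):
    # runs once in each worker process before any work items are dispatched
    global _shared_params
    _shared_params = params


def _work(grain_id):
    # every call sees the params installed by _init in its own process
    return grain_id, _shared_params['scale'] * grain_id


if __name__ == '__main__':
    with multiprocessing.Pool(2, _init, ({'scale': 2.0},)) as pool:
        print(pool.map(_work, range(8), chunksize=4))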