def _prepare_photon_files(self, comm=None):
    """ Convert raw photon input into the sparse-photon and detector files EMC needs.

    Reads either a directory of photon files or a single photon file from
    ``self.input_path``, derives the detector geometry from the first file,
    and writes the detector file, a sparse photon file and an average-pattern
    file. A lock file guards the conversion so a concurrent process does not
    clobber an ongoing one (see ``_need_prepare_photon_files``).

    :param comm: MPI communicator. Currently unused -- the conversion runs
                 serially as rank 0; the commented-out lines show the
                 intended parallel version.
    :raises IOError: if ``self.input_path`` is neither a file nor a directory.
    """
    # Serial fallback for the (disabled) MPI parallelization.
    #thisProcess = comm.rank
    thisProcess = 0
    #numProcesses = comm.size
    numProcesses = 1

    # Collect the input photon file(s); sort for a reproducible ordering.
    if os.path.isdir(self.input_path):
        photonFiles = sorted(
            os.path.join(self.input_path, pf)
            for pf in os.listdir(self.input_path))
    elif os.path.isfile(self.input_path):
        photonFiles = [self.input_path]
    else:
        raise IOError(" Input file %s not found." % self.input_path)

    gen = EMCCaseGenerator(self._outputLog)
    # Geometry is taken from the first photon file only.
    gen.readGeomFromPhotonData(photonFiles[0], thisProcess)

    if thisProcess == 0:
        # Create the lock file directly instead of shelling out
        # (was: os.system("touch %s" % self._lockFile)).
        with open(self._lockFile, 'a'):
            os.utime(self._lockFile, None)
        gen.writeDetectorToFile(filename=self._detectorFile)

    # Each (virtual) process writes its own partial sparse-photon and
    # average-pattern files, joined below by rank 0.
    gen.writeSparsePhotonFile(photonFiles,
                              self._sparsePhotonFile + "_" + str(thisProcess),
                              self._avgPatternFile + "_" + str(thisProcess),
                              thisProcess, numProcesses)

    #comm.Barrier()
    if thisProcess == 0:
        self._join_photon_files(numProcesses)
    #comm.Barrier()

    if thisProcess == 0:
        _print_to_log(
            msg="Sparse photons file created. Deleting lock file now",
            log_file=self._outputLog)
        # _print_to_log(msg="Detector parameters: %d %d %d"%(gen.qmax, len(gen.detector), len(gen.beamstop)), log_file=self._outputLog)
        # Release the lock directly instead of shelling out
        # (was: os.system("rm %s " % self._lockFile)).
        os.remove(self._lockFile)
def _need_prepare_photon_files(self, thisProcess): ############################################################### # A lock file is created if subprocess is converting sparse photons # so that another subprocess does not clobber an ongoing conversion. # Make photons.dat and detector.dat if they don't exist. # Create time-tagged output subdirectory for intermediate states. ############################################################### while (os.path.isfile(self._lockFile)): # Sleep in 30 s intervals, then check if sparse photon lock has been released. sleep_duration = 30 msg = "Lock file in " + self._tmp_out_dir + ". " msg += "Photons.dat likely being written to tmpDir by another programm. " msg += "Sleeping this programm for %d s." % sleep_duration if thisProcess == 0: _print_to_log(msg, log_file=self._outputLog) time.sleep(sleep_duration) return not (os.path.isfile(self._sparsePhotonFile) and os.path.isfile(self._detectorFile))
def _run(self):
    """ Private method to run the Expand-Maximize-Compress (EMC) algorithm.

    :return: 0 if EMC returns successfully, 1 if not.

    :note: Copied and adapted from the main routine in s2e_recon/EMC/runEMC.py
    """
    # Local imports: keep MPI initialization out of module import time, and
    # stop mpi4py from finalizing MPI automatically at exit -- we call
    # MPI.Finalize() explicitly on every return path below.
    import shutil
    import traceback
    import mpi4py.rc
    mpi4py.rc.finalize = False
    from mpi4py import MPI

    # MPI info
    comm = MPI.COMM_WORLD
    thisProcess = comm.rank

    if self._need_prepare_photon_files(thisProcess):
        if thisProcess == 0:
            msg = "Photons.dat and detector.dat not found in " + self._tmp_out_dir + ". Will create them now..."
            _print_to_log(msg=msg, log_file=self._outputLog)
        self._prepare_photon_files(comm)
    else:
        if thisProcess == 0:
            msg = "Photons.dat and detector.dat already exists in " + self._tmp_out_dir + "."
            _print_to_log(msg=msg, log_file=self._outputLog)

    # the rest is non-parallel (yet)
    if thisProcess != 0:
        MPI.Finalize()
        return 0

    ###############################################################
    # Instantiate a reconstruction object
    ###############################################################
    # If parameters are given, map them to command line arguments.
    initial_number_of_quaternions = self.parameters.initial_number_of_quaternions
    max_number_of_quaternions = self.parameters.max_number_of_quaternions
    max_number_of_iterations = self.parameters.max_number_of_iterations
    min_error = self.parameters.min_error
    beamstop = self.parameters.beamstop  # NOTE(review): read but currently unused below.
    detailed_output = self.parameters.detailed_output

    # Pre-computed quaternion refinement files ship with the calculator.
    quaternion_dir = os.path.join(
        os.path.abspath(os.path.dirname(__file__)), 'CalculatorUtilities',
        'quaternions')

    gen = EMCCaseGenerator(self._outputLog)
    gen.readGeomFromDetectorFile(self._detectorFile)
    _print_to_log(msg="Detector parameters: %d %d %d" %
                  (gen.qmax, len(gen.detector), len(gen.beamstop)),
                  log_file=self._outputLog)

    # Expose the shared detector/photon data inside the run directory.
    if not (os.path.isfile(
            os.path.join(self._run_instance_dir, "detector.dat"))):
        os.symlink(os.path.join(self._tmp_out_dir, "detector.dat"),
                   os.path.join(self._run_instance_dir, "detector.dat"))
    if not (os.path.isfile(
            os.path.join(self._run_instance_dir, "photons.dat"))):
        os.symlink(os.path.join(self._tmp_out_dir, "photons.dat"),
                   os.path.join(self._run_instance_dir, "photons.dat"))

    ###############################################################
    # Create dummy destination h5 for intermediate output from EMC
    ###############################################################
    cwd = os.path.abspath(os.curdir)
    os.chdir(self._run_instance_dir)

    #Output file is kept in tmpOutDir.
    outFile = self.output_path
    offset_iter = 0
    if not (os.path.isfile(outFile)):
        # Fresh run: lay out the group structure of the output file.
        f = h5py.File(outFile, "w")
        f.create_group("data")
        f.create_group("misc")
        f.create_group("info")
        f.create_group("params")
        f.create_group("history")
        gg = f["history"]
        gg.create_group("intensities")
        gg.create_group("error")
        gg.create_group("angle")
        gg.create_group("mutual_info")
        gg.create_group("quaternion")
        gg.create_group("time")
        c = numpy.array([gen.qmax, gen.qmax, gen.qmax])
        f.create_dataset("data/center", data=c)
        f.create_dataset("misc/qmax", data=gen.qmax)
        f.create_dataset("misc/detector", data=gen.detector)
        f.create_dataset("misc/beamstop", data=gen.beamstop)
        f.create_dataset("version", data=h5py.version.hdf5_version)
        f.close()
    else:
        # Restart: continue iteration numbering after what is already stored.
        f = h5py.File(outFile, 'r')
        offset_iter = len(list(f["/history/intensities"].keys()))
        f.close()
        msg = "Output will be appended to the results of %d iterations before this." % offset_iter
        _print_to_log(msg=msg, log_file=self._outputLog)

    ###############################################################
    # Iterate EMC
    ###############################################################
    intensL = 2 * gen.qmax + 1  # Side length of the cubic intensity volume.
    iter_num = 1
    currQuat = initial_number_of_quaternions
    try:
        while (currQuat <= max_number_of_quaternions):
            # Point quaternion.dat at the refinement level in use.
            if os.path.isfile(
                    os.path.join(self._run_instance_dir, "quaternion.dat")):
                os.remove(
                    os.path.join(self._run_instance_dir, "quaternion.dat"))
            os.symlink(
                os.path.join(quaternion_dir,
                             "quaternion" + str(currQuat) + ".dat"),
                os.path.join(self._run_instance_dir, "quaternion.dat"))

            diff = 1.
            while (iter_num <= max_number_of_iterations):
                # Converged at this refinement level? Move to the next quaternion set.
                if (iter_num > 1 and diff < min_error):
                    _print_to_log(
                        msg=
                        "Error %0.3e is smaller than threshold %0.3e. Going to next quaternion."
                        % (diff, min_error),
                        log_file=self._outputLog)
                    break
                _print_to_log(
                    "Beginning iteration %d, with quaternion %d %s" %
                    (iter_num + offset_iter, currQuat, "." * 20),
                    log_file=self._outputLog)

                # Here is the actual timed EMC iteration, which calls the EMC.c code.
                # NOTE: time.clock() was removed in Python 3.8; use wall time,
                # which is what an external-process wait actually measures.
                start_time = time.time()
                #command_sequence = ['EMC.x', '1']
                command_sequence = ['EMC', '1']
                process_handle = subprocess.Popen(command_sequence)
                process_handle.wait()
                time_taken = time.time() - start_time
                _print_to_log("Took %lf s" % (time_taken),
                              log_file=self._outputLog)

                # Read intermediate output of EMC.c and stuff them into a h5 file
                # Delete these EMC.c-generated intermediate files afterwards,
                # except finish_intensity.dat --> start_intensity.dat for next iteration.
                gen.intensities = (numpy.fromfile("finish_intensity.dat",
                                                  sep=" ")).reshape(
                                                      intensL, intensL,
                                                      intensL)
                data_info = numpy.fromfile("mutual_info.dat", sep=" ")
                most_likely_orientations = numpy.fromfile(
                    "most_likely_orientations.dat", sep=" ")

                if (os.path.isfile("start_intensity.dat")):
                    # RMS change between this iteration's result and its input.
                    intens1 = numpy.fromfile("start_intensity.dat", sep=" ")
                    diff = numpy.sqrt(
                        numpy.mean(
                            numpy.abs(gen.intensities.flatten() -
                                      intens1)**2))
                else:
                    # First iteration: force at least one more pass.
                    diff = 2. * min_error

                f = h5py.File(outFile, "a")
                gg = f["history/intensities"]
                if detailed_output:
                    gg.create_dataset("%04d" % (iter_num + offset_iter),
                                      data=gen.intensities,
                                      compression="gzip",
                                      compression_opts=9)
                try:
                    f.create_dataset("data/data",
                                     data=gen.intensities,
                                     compression="gzip",
                                     compression_opts=9)
                except (RuntimeError, ValueError, OSError):
                    # Dataset already exists (later iteration / restart):
                    # overwrite its contents in place.
                    temp = f["data/data"]
                    temp[...] = gen.intensities

                gg = f["history/error"]
                gg.create_dataset("%04d" % (iter_num + offset_iter),
                                  data=diff)
                _print_to_log("rms change in intensities %e" % (diff),
                              log_file=self._outputLog)

                gg = f["history/angle"]
                gg.create_dataset("%04d" % (iter_num + offset_iter),
                                  data=most_likely_orientations,
                                  compression="gzip",
                                  compression_opts=9)
                try:
                    f.create_dataset("data/angle",
                                     data=most_likely_orientations,
                                     compression="gzip",
                                     compression_opts=9)
                except (RuntimeError, ValueError, OSError):
                    temp = f["data/angle"]
                    temp[...] = most_likely_orientations

                gg = f["history/mutual_info"]
                gg.create_dataset("%04d" % (iter_num + offset_iter),
                                  data=data_info)
                gg = f["history/quaternion"]
                gg.create_dataset("%04d" % (iter_num + offset_iter),
                                  data=currQuat)
                gg = f["history/time"]
                gg.create_dataset("%04d" % (iter_num + offset_iter),
                                  data=time_taken)
                f.close()

                f = open(self._outputLog, "a")
                f.write("%e\t %lf\n" % (diff, time_taken))
                f.close()

                # Seed the next iteration from this one's result
                # (was: os.system("cp finish_intensity.dat start_intensity.dat")).
                shutil.copy("finish_intensity.dat", "start_intensity.dat")

                _print_to_log("Iteration number %d completed" % (iter_num),
                              log_file=self._outputLog)
                iter_num += 1

            currQuat += 1

        _print_to_log("All EMC iterations completed",
                      log_file=self._outputLog)
        os.chdir(cwd)
        MPI.Finalize()
        return 0
    except Exception:
        # Log the failure instead of discarding it. (A bare except here would
        # also have swallowed KeyboardInterrupt/SystemExit.)
        _print_to_log(traceback.format_exc(), log_file=self._outputLog)
        os.chdir(cwd)
        MPI.Finalize()
        return 1