def initialize(self):
    """ Initialise the processing """
    # Get the start time
    start_time = time()

    # Ensure the reflections contain bounding boxes
    assert "bbox" in self.reflections, "Reflections have no bbox"

    # Split the reflections into partials
    self._split_reflections()

    # Create the reflection manager
    frames = self.experiments[0].scan.get_array_range()
    self.manager = ReflectionManagerPerImage(frames, self.reflections)

    # Parallel reading of HDF5 from the same handle is not allowed. Python
    # multiprocessing uses fork on Linux, so we need to close the file here
    # and let each worker process reopen it.
    for exp in self.experiments:
        if exp.imageset.reader().is_single_file_reader():
            exp.imageset.reader().nullify_format_instance()

    # Set the initialization time
    self.time.initialize = time() - start_time
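# Sketch (not from the original source): why nullify_format_instance() is called
# above. dxtbx caches an open file/format instance on a single-file reader; with
# fork-based multiprocessing every child process would inherit that HDF5 handle,
# and parallel reads through a shared handle are not allowed. Clearing the cached
# instance before forking makes each worker reopen the file for itself. Below is
# a minimal illustration of the dispatch pattern this enables, using hypothetical
# helper names; it assumes Task objects are picklable and callable.
import multiprocessing


def _process_one(task):
    """Hypothetical worker: assumes Task instances are callable and picklable."""
    return task()


def _fork_and_process(tasks, nproc):
    """Hypothetical fork-based dispatch of per-image tasks (illustration only)."""
    with multiprocessing.Pool(nproc) as pool:
        return pool.map(_process_one, list(tasks))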
class ManagerImage:
    """ A class to manage processing book-keeping """

    def __init__(self, experiments, reflections, params):
        """
        Initialise the manager.

        :param experiments: The list of experiments
        :param reflections: The list of reflections
        :param params: The phil parameters
        """
        # Initialise the callbacks
        self.executor = None

        # Save some data
        self.experiments = experiments
        self.reflections = reflections

        # Save some parameters
        self.params = params

        # Set the finalized flag to False
        self.finalized = False

        # Initialise the timing information
        self.time = dials.algorithms.integration.TimingInfo()

    def initialize(self):
        """ Initialise the processing """
        # Get the start time
        start_time = time()

        # Ensure the reflections contain bounding boxes
        assert "bbox" in self.reflections, "Reflections have no bbox"

        # Split the reflections into partials
        self._split_reflections()

        # Create the reflection manager
        frames = self.experiments[0].scan.get_array_range()
        self.manager = ReflectionManagerPerImage(frames, self.reflections)

        # Set the initialization time
        self.time.initialize = time() - start_time

    def task(self, index):
        """ Get a task. """
        return Task(
            index=index,
            frames=self.manager.frames(index),
            reflections=self.manager.split(index),
            experiments=self.experiments,
            params=self.params,
            executor=self.executor,
        )

    def tasks(self):
        """ Iterate through the tasks. """
        for i in range(len(self)):
            yield self.task(i)

    def accumulate(self, result):
        """ Accumulate the results. """
        self.manager.accumulate(result.index, result.reflections)
        if result.data is not None:
            self.executor.accumulate(result.index, result.data)
        self.time.read += result.read_time
        self.time.process += result.process_time
        self.time.total += result.total_time

    def finalize(self):
        """ Finalize the processing and finish. """
        # Get the start time
        start_time = time()

        # Check manager is finished
        assert self.manager.finished(), "Manager is not finished"

        # Update the time and finalized flag
        self.time.finalize = time() - start_time
        self.finalized = True

    def result(self):
        """
        Return the result.

        :return: The result
        """
        assert self.finalized, "Manager is not finalized"
        return self.reflections

    def finished(self):
        """
        Return if all tasks have finished.

        :return: True/False all tasks have finished
        """
        return self.finalized and self.manager.finished()

    def __len__(self):
        """
        Return the number of tasks.

        :return: the number of tasks
        """
        return len(self.manager)

    def summary(self):
        return ""

    def _split_reflections(self):
        """ Split the reflections into partials or over job boundaries """
        # Optionally split the reflection table into partials, otherwise,
        # split over job boundaries
        num_full = len(self.reflections)
        self.reflections.split_partials()
        num_partial = len(self.reflections)
        assert num_partial >= num_full, "Invalid number of partials"
        if num_partial > num_full:
            logger.info(
                " Split %d reflections into %d partial reflections\n",
                num_full,
                num_partial,
            )
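# Usage sketch (not from the original source): a hypothetical serial driver
# showing the intended lifecycle of ManagerImage, inferred from the methods
# above. The `executor` object and the callable Task interface are assumptions.
def _integrate_images(experiments, reflections, params, executor):
    """Hypothetical driver loop for ManagerImage (illustration only)."""
    manager = ManagerImage(experiments, reflections, params)
    manager.executor = executor
    manager.initialize()
    for task in manager.tasks():
        result = task()  # assumed: each Task is callable and returns a result object
        manager.accumulate(result)
    manager.finalize()
    return manager.result()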
class ManagerImage(object):
    ''' A class to manage processing book-keeping '''

    def __init__(self, experiments, reflections, params):
        '''
        Initialise the manager.

        :param experiments: The list of experiments
        :param reflections: The list of reflections
        :param params: The phil parameters
        '''
        # Initialise the callbacks
        self.executor = None

        # Save some data
        self.experiments = experiments
        self.reflections = reflections

        # Save some parameters
        self.params = params

        # Set the finalized flag to False
        self.finalized = False

        # Initialise the timing information
        self.time = TimingInfo()

    def initialize(self):
        ''' Initialise the processing '''
        from dials_algorithms_integration_integrator_ext import ReflectionManagerPerImage
        from time import time

        # Get the start time
        start_time = time()

        # Ensure the reflections contain bounding boxes
        assert "bbox" in self.reflections, "Reflections have no bbox"

        # Split the reflections into partials
        self._split_reflections()

        # Create the reflection manager
        frames = self.experiments[0].scan.get_array_range()
        self.manager = ReflectionManagerPerImage(frames, self.reflections)

        # Parallel reading of HDF5 from the same handle is not allowed. Python
        # multiprocessing uses fork on Linux, so we need to close and reopen
        # the file.
        from dxtbx.imageset import SingleFileReader
        for exp in self.experiments:
            if isinstance(exp.imageset.reader(), SingleFileReader):
                exp.imageset.reader().nullify_format_instance()

        # Set the initialization time
        self.time.initialize = time() - start_time

    def task(self, index):
        ''' Get a task. '''
        return Task(
            index=index,
            frames=self.manager.frames(index),
            reflections=self.manager.split(index),
            experiments=self.experiments,
            params=self.params,
            executor=self.executor)

    def tasks(self):
        ''' Iterate through the tasks. '''
        for i in range(len(self)):
            yield self.task(i)

    def accumulate(self, result):
        ''' Accumulate the results. '''
        self.manager.accumulate(result.index, result.reflections)
        if result.data is not None:
            self.executor.accumulate(result.index, result.data)
        self.time.read += result.read_time
        self.time.process += result.process_time
        self.time.total += result.total_time

    def finalize(self):
        ''' Finalize the processing and finish. '''
        from time import time

        # Get the start time
        start_time = time()

        # Check manager is finished
        assert self.manager.finished(), "Manager is not finished"

        # Update the time and finalized flag
        self.time.finalize = time() - start_time
        self.finalized = True

    def result(self):
        '''
        Return the result.

        :return: The result
        '''
        assert self.finalized, "Manager is not finalized"
        return self.reflections

    def finished(self):
        '''
        Return if all tasks have finished.

        :return: True/False all tasks have finished
        '''
        return self.finalized and self.manager.finished()

    def __len__(self):
        '''
        Return the number of tasks.

        :return: the number of tasks
        '''
        return len(self.manager)

    def summary(self):
        return ''

    def _split_reflections(self):
        ''' Split the reflections into partials or over job boundaries '''
        # Optionally split the reflection table into partials, otherwise,
        # split over job boundaries
        num_full = len(self.reflections)
        self.reflections.split_partials()
        num_partial = len(self.reflections)
        assert num_partial >= num_full, "Invalid number of partials"
        if num_partial > num_full:
            logger.info(' Split %d reflections into %d partial reflections\n' % (
                num_full, num_partial))