Example #1
    def print_results(self, final_table=None):
        """ Prints diagnostics from the final integration run. """

        assert self.info

        if final_table is None:
            final_table = ["\n\n{:-^80}\n".format('ANALYSIS OF RESULTS')]

        if not self.info.categories['integrated']:
            final_table.append('NO IMAGES INTEGRATED!')
        else:
            label_lens = [len(v['label']) for v in self.info.stats.values()]
            max_label = int(5 * round(float(np.max(label_lens)) / 5)) + 5
            for k, v in self.info.stats.items():
                if k in ('lres', 'res', 'beamX', 'beamY'):
                    continue
                line = '{: <{l}}:  max = {:<6.2f} min = {:<6.2f} ' \
                       'avg = {:<6.2f} ({:<6.2f})' \
                       ''.format(v['label'], v['max'], v['min'],
                                 v['mean'], v['std'], l=max_label)
                final_table.append(line)

            # TODO: Figure out what to do with summary charts
            # # If more than one integrated image, plot various summary graphs
            # if len(self.info.categories['integrated']) > 1:
            #   plot = Plotter(self.params, self.info)
            #   if self.params.analysis.summary_graphs:
            #     if ( self.params.advanced.processing_backend == 'ha14' and
            #             self.params.cctbx_ha14.grid_search.type is not None
            #     ):
            #       plot.plot_spotfinding_heatmap(write_files=True)
            #     plot.plot_res_histogram(write_files=True)
            #     med_beamX, med_beamY, pixel_size = plot.plot_beam_xy(write_files=True,
            #                                                        return_values=True)
            #   else:
            #     with warnings.catch_warnings():
            #       # To catch any 'mean of empty slice' runtime warnings
            #       warnings.simplefilter("ignore", category=RuntimeWarning)
            #       beamXY_info = plot.calculate_beam_xy()
            #       beamX, beamY = beamXY_info[:2]
            #       med_beamX = np.median(beamX)
            #       med_beamY = np.median(beamY)
            #       pixel_size = beamXY_info[-1]

            final_table.append("{: <{l}}:  X = {:<4.2f}, Y = {:<4.2f}"
                               "".format('Median Beam Center',
                                         self.info.stats['beamX']['mean'],
                                         self.info.stats['beamY']['mean'],
                                         l=max_label))

            # Special entry for resolution last
            v = self.info.stats['res']
            final_table.append('{: <{l}}:  low = {:<6.2f} high = {:<6.2f} ' \
                               'avg = {:<6.2f} ({:<6.2f})' \
                               ''.format(v['label'], v['max'], v['min'],
                                         v['mean'], v['std'], l=max_label))

        for item in final_table:
            util.main_log(self.info.logfile, item, False)
        self.info.update(final_table=final_table)
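
Each `self.info.stats` entry consumed above is a dict with 'label', 'max', 'min', 'mean', and 'std' keys. A minimal standalone sketch of the same label-padding and line formatting, using hypothetical sample values rather than real IOTA output:

import numpy as np

# Hypothetical stats in the shape print_results() expects
stats = {
    'strong': {'label': 'Strong spots', 'max': 250.0, 'min': 12.0,
               'mean': 87.3, 'std': 40.1},
    'mos': {'label': 'Mosaicity', 'max': 0.42, 'min': 0.01,
            'mean': 0.11, 'std': 0.08},
}

# Round the longest label up to the next multiple of 5, plus padding
label_lens = [len(v['label']) for v in stats.values()]
max_label = int(5 * round(float(np.max(label_lens)) / 5)) + 5

for v in stats.values():
    print('{: <{l}}:  max = {:<6.2f} min = {:<6.2f} '
          'avg = {:<6.2f} ({:<6.2f})'.format(
              v['label'], v['max'], v['min'], v['mean'], v['std'],
              l=max_label))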
Example #2
File: iota_run.py  Project: dials/iota
    def process(self):
        """Run importer and/or processor."""

        start_time = time.time()

        # Process images
        with prog_message(
            "Processing {} images".format(len(self.info.unprocessed)),
            prog="PROCESSING",
            out_type=self.out_type,
        ):
            if self.out_type == "progress":
                self.prog_count = 0
                self.gs_prog = cmd.ProgressBar(title="PROCESSING")
            img_objects = self.run_process(iterable=self.info.unprocessed)

            proc_time = datetime.timedelta(seconds=int(time.time() - start_time))
            hours, minutes, seconds = str(proc_time).split(":")
            main_log(
                self.info.logfile,
                "Total processing time: {} hours, {} minutes, {} seconds"
                "".format(hours, minutes, seconds),
                print_tag=False,
            )

        # Run analysis if not in GUI mode
        if not "gui" in self.out_type:
            with prog_message(
                "Analyzing results", prog="ANALYSIS", out_type=self.out_type
            ):
                self.info.finished_objects = [o.obj_file for o in img_objects]
                self.run_analysis()

            if "silent" not in self.out_type:
                if self.info.have_results:
                    print("\n".join(self.info.final_table))
                    print("\n".join(self.info.uc_table))
                    print("\n".join(self.info.summary))
                else:
                    print("\n **** NO IMAGES INTEGRATED! ****")

        # Determine total runtime
        if not "gui" in self.out_type:
            runtime = datetime.timedelta(seconds=int(time.time() - start_time))
            hours, minutes, seconds = str(runtime).split(":")
            main_log(
                self.info.logfile,
                "Total run time: {} hours, {} minutes, {} seconds"
                "".format(hours, minutes, seconds),
                print_tag=True,
            )
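
`prog_message` is used above as a context manager that brackets a stage of work with status output; its implementation is not part of this listing. A minimal sketch of what such a helper could look like (names and behavior are assumptions, not the IOTA source):

import contextlib

@contextlib.contextmanager
def prog_message(msg, prog='', out_type='progress'):
    # Hypothetical stand-in: announce the stage, run the block, report done
    if 'silent' not in out_type:
        print('*** {}: {}'.format(prog, msg))
    try:
        yield
    finally:
        if 'silent' not in out_type:
            print('*** {}: done'.format(prog))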
Example #3
    def import_and_process(self, input_entry):
        img_object = self.importer.run(input_entry)
        if img_object.status == 'imported':
            img_object = self.integrator.run(img_object)

        # Update main log
        if hasattr(self.info, 'logfile'):
            main_log_entry = "\n".join(img_object.log_info)
            util.main_log(self.info.logfile, main_log_entry)
            util.main_log(self.info.logfile, '\n{:-^100}\n'.format(''))

        # Set status to final
        img_object.status = 'final'
        return img_object
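
Since `import_and_process` takes one input entry and returns a finished image object, it maps naturally over a list of inputs. A hedged usage sketch (the pool wiring is hypothetical; it assumes the processor object and its results are picklable):

from multiprocessing import Pool

def process_all(processor, input_entries, n_procs=4):
    # Hypothetical driver: fan import_and_process out over a worker pool
    with Pool(processes=n_procs) as pool:
        return pool.map(processor.import_and_process, input_entries)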
Example #4
    def print_summary(self, write_files=True):
        """Prints summary and appends to general log file.

        Also outputs some of it on stdout. Also writes out output list
        files.
        """

        assert self.info

        if not self.info.categories["integrated"]:
            util.main_log(
                self.info.logfile,
                "NO IMAGES SUCCESSFULLY PROCESSSED!",
                (not self.gui_mode),
            )
            return

        summary = []
        summary.append("\n\n{:-^80}\n".format("SUMMARY"))
        categories = [
            "total",
            "failed_triage",
            "have_diffraction",
            "failed_spotfinding",
            "failed_indexing",
            "failed_grid_search",
            "failed_integration",
            "failed_filter",
            "integrated",
        ]
        for cat in categories:
            lst, fail, fn, _ = self.info.categories[cat]
            path = os.path.join(self.info.int_base, fn)
            if len(lst) > 0 or cat in ("integrated", "have_diffraction"):
                summary.append("{: <20}: {}".format("{} ".format(fail), len(lst)))
            with open(path, "w") as cf:
                for item in lst:
                    if isinstance(item, (tuple, list)):
                        item = ", ".join([str(i) for i in item])
                    cf.write("{}\n".format(item))
            if cat == "integrated" and write_files:
                if not hasattr(self, "prime_data_path"):
                    self.prime_data_path = path

        summary.append("\n\nIOTA version {0}".format(iota_version))
        summary.append("{}\n".format(now))

        for item in summary:
            util.main_log(self.info.logfile, "{}".format(item), False)
        self.info.update(summary=summary)
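
`print_summary` unpacks each `self.info.categories` entry as a 4-tuple: the list of items in the category, a human-readable status label, the output list filename, and an unused fourth field. A hypothetical entry illustrating that shape:

# Hypothetical categories entry in the shape print_summary() unpacks
categories = {
    'integrated': (
        ['img_0001.pickle', 'img_0002.pickle'],  # lst: items in category
        'successfully integrated',               # fail: label for summary
        'integrated.lst',                        # fn: output list filename
        None,                                    # unused fourth field
    ),
}
lst, fail, fn, _ = categories['integrated']
print('{: <20}: {}'.format('{} '.format(fail), len(lst)))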
Example #5
    def import_and_process(self, input_entry):
        img_object = self.importer.run(input_entry)
        if img_object.status == "imported":
            with util.Capturing() as junk_output:
                img_object = self.integrator.run(img_object)

        # Update main log
        if hasattr(self.info, "logfile"):
            main_log_entry = "\n".join(img_object.log_info)
            util.main_log(self.info.logfile, main_log_entry)
            util.main_log(self.info.logfile, "\n{:-^100}\n".format(""))

        # Set status to final
        img_object.status = "final"
        return img_object
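
This variant wraps the integrator call in `util.Capturing()` to swallow console chatter. A minimal sketch of such a stdout-capturing context manager (an assumption about its behavior, not the actual IOTA implementation):

import sys
from io import StringIO

class Capturing(list):
    """ Capture stdout lines into this list while the block runs. """
    def __enter__(self):
        self._stdout = sys.stdout
        sys.stdout = self._stringio = StringIO()
        return self
    def __exit__(self, *args):
        self.extend(self._stringio.getvalue().splitlines())
        sys.stdout = self._stdout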
Example #6
    def process(self):
        """ Run importer and/or processor """

        start_time = time.time()

        # Process images
        with prog_message('Processing {} images'.format(
                len(self.info.unprocessed)),
                          prog='PROCESSING',
                          out_type=self.out_type):
            if self.out_type == 'progress':
                self.prog_count = 0
                self.gs_prog = cmd.ProgressBar(title='PROCESSING')
            img_objects = self.run_process(iterable=self.info.unprocessed)

            proc_time = datetime.timedelta(seconds=int(time.time() -
                                                       start_time))
            hours, minutes, seconds = str(proc_time).split(':')
            util.main_log(
                self.info.logfile,
                "Total processing time: {} hours, {} minutes, {} seconds"
                "".format(hours, minutes, seconds),
                print_tag=False)

        # Run analysis if not in GUI mode
        if 'gui' not in self.out_type:
            with prog_message('Analyzing results',
                              prog='ANALYSIS',
                              out_type=self.out_type):
                self.info.finished_objects = [o.obj_file for o in img_objects]
                self.run_analysis()

            if 'silent' not in self.out_type:
                if self.info.have_results:
                    print('\n'.join(self.info.final_table))
                    print('\n'.join(self.info.uc_table))
                    print('\n'.join(self.info.summary))
                else:
                    print('\n **** NO IMAGES INTEGRATED! ****')

        # Determine total runtime
        if 'gui' not in self.out_type:
            runtime = datetime.timedelta(seconds=int(time.time() - start_time))
            hours, minutes, seconds = str(runtime).split(':')
            util.main_log(self.info.logfile,
                          "Total run time: {} hours, {} minutes, {} seconds"
                          "".format(hours, minutes, seconds),
                          print_tag=True)
Example #7
    def print_summary(self, write_files=True):
        """ Prints summary and appends to general log file. Also outputs some of it
        on stdout. Also writes out output list files.
    """

        assert self.info

        if not self.info.categories['integrated']:
            util.main_log(self.info.logfile,
                          "NO IMAGES SUCCESSFULLY PROCESSSED!",
                          (not self.gui_mode))
            return

        summary = []
        summary.append("\n\n{:-^80}\n".format('SUMMARY'))
        categories = [
            'total', 'failed_triage', 'have_diffraction', 'failed_spotfinding',
            'failed_indexing', 'failed_grid_search', 'failed_integration',
            'failed_filter', 'integrated'
        ]
        for cat in categories:
            lst, fail, fn, _ = self.info.categories[cat]
            path = os.path.join(self.info.int_base, fn)
            if len(lst) > 0 or cat in ('integrated', 'have_diffraction'):
                summary.append('{: <20}: {}'.format('{} '.format(fail),
                                                    len(lst)))
            with open(path, 'w') as cf:
                for item in lst:
                    if isinstance(item, (tuple, list)):
                        item = ', '.join([str(i) for i in item])
                    cf.write('{}\n'.format(item))
            if cat == 'integrated' and write_files:
                if not hasattr(self, 'prime_data_path'):
                    self.prime_data_path = path

        summary.append('\n\nIOTA version {0}'.format(iota_version))
        summary.append("{}\n".format(now))

        for item in summary:
            util.main_log(self.info.logfile, "{}".format(item), False)
        self.info.update(summary=summary)
Example #8
  def initialize_main_log(self):
    """ Initialize main log (iota.log) and record starting parameters

    :return: (True, msg) on success, (False, msg) on failure
    """
    try:
      # Generate text of params
      self.iota_phil_string = util.convert_phil_to_text(self.iota_phil)

      # Initialize main log
      self.logfile = os.path.abspath(os.path.join(self.int_base, 'iota.log'))

      # Log starting info
      util.main_log(self.logfile, '{:*^80} \n'.format(' IOTA MAIN LOG '))
      util.main_log(self.logfile, '{:-^80} \n'.format(' SETTINGS FOR THIS RUN '))
      util.main_log(self.logfile, self.iota_phil_string)

      # Log cctbx.xfel / DIALS settings
      util.main_log(self.logfile, '{:-^80} \n'.format('BACKEND SETTINGS'))
      util.main_log(self.logfile, self.target_phil)

      return True, 'IOTA_INIT: MAIN LOG INITIALIZED'
    except Exception as e:
      return False, 'IOTA_INIT_ERROR: MAIN LOG NOT INITIALIZED, {}'.format(e)
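
The banner lines rely on Python's format-spec fill-and-align syntax: `{:*^80}` centers the argument in an 80-character field padded with `*`, and `{:-^80}` does the same with dashes. For instance:

print('{:*^80}'.format(' IOTA MAIN LOG '))          # *-padded, centered
print('{:-^80}'.format(' SETTINGS FOR THIS RUN '))  # --padded, centered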
Example #9
    def unit_cell_analysis(self):
        """ Calls unit cell analysis module, which uses hierarchical clustering
        (Zeldin, et al, Acta D, 2015) to split integration results according to
        detected morphological groupings (if any). Most useful with preliminary
        integration without target unit cell specified. """

        # Will not run clustering if only one integration result found or if turned off
        if not self.info.categories['integrated']:
            util.main_log(self.info.logfile,
                          "\n\n{:-^80}\n".format(' UNIT CELL ANALYSIS '), True)
            util.main_log(self.info.logfile,
                          '\n UNIT CELL CANNOT BE DETERMINED!', True)

        elif len(self.info.categories['integrated']) == 1:
            unit_cell = self.info.cluster_iterable[0][:6]
            point_group = self.info.cluster_iterable[0][6]
            util.main_log(self.info.logfile,
                          "\n\n{:-^80}\n".format(' UNIT CELL ANALYSIS '), True)
            uc_line = "{:<6} {:^4}:  {:<6.2f}, {:<6.2f}, {:<6.2f}, {:<6.2f}, " \
                      "{:<6.2f}, {:<6.2f}".format('(1)', point_group,
                                                  unit_cell[0], unit_cell[1],
                                                  unit_cell[2],
                                                  unit_cell[3], unit_cell[4],
                                                  unit_cell[5])
            util.main_log(self.info.logfile, uc_line, True)

            self.info.best_pg = str(point_group)
            self.info.best_uc = unit_cell

        else:
            uc_table = []
            uc_summary = []

            if self.params.analysis.clustering.flag_on:
                # run hierarchical clustering analysis
                from xfel.clustering.cluster import Cluster

                counter = 0
                self.info.clusters = []

                threshold = self.params.analysis.clustering.threshold
                cluster_limit = self.params.analysis.clustering.limit
                final_pickles = self.info.categories['integrated'][0]

                pickles = []
                if self.params.analysis.clustering.n_images > 0:
                    import random

                    # Randomly sample n_images unique integration results
                    n_images = self.params.analysis.clustering.n_images
                    while len(pickles) < min(n_images, len(final_pickles)):
                        random_number = random.randrange(0, len(final_pickles))
                        if final_pickles[random_number] not in pickles:
                            pickles.append(final_pickles[random_number])
                else:
                    pickles = final_pickles

                # Cluster from files (slow, but will keep for now)
                ucs = Cluster.from_files(pickle_list=pickles)

                # Do clustering
                clusters, _ = ucs.ab_cluster(threshold=threshold,
                                             log=False,
                                             write_file_lists=False,
                                             schnell=False,
                                             doplot=False)
                uc_table.append("\n\n{:-^80}\n" \
                                "".format(' UNIT CELL ANALYSIS '))

                # extract clustering info and add to summary output list
                if cluster_limit is None:
                    cluster_limit = min(10, len(pickles) // 10)

                for cluster in clusters:
                    sorted_pg_comp = sorted(cluster.pg_composition.items(),
                                            key=lambda x: -1 * x[1])
                    pg_nums = [pg[1] for pg in sorted_pg_comp]
                    cons_pg = sorted_pg_comp[np.argmax(pg_nums)]

                    if len(cluster.members) > cluster_limit:
                        counter += 1

                        # Write to file
                        cluster_filenames = [j.path for j in cluster.members]
                        if self.params.analysis.clustering.write_files:
                            output_file = os.path.join(
                                self.info.int_base,
                                "uc_cluster_{}.lst".format(counter))
                            for fn in cluster_filenames:
                                with open(output_file, 'a') as scf:
                                    scf.write('{}\n'.format(fn))

                            mark_output = os.path.basename(output_file)
                        else:
                            mark_output = '*'
                            output_file = None

                    else:
                        mark_output = ''
                        output_file = None

                    # Populate clustering info for GUI display
                    uc_init = uctbx.unit_cell(cluster.medians)
                    symmetry = crystal.symmetry(unit_cell=uc_init,
                                                space_group_symbol='P1')
                    groups = metric_subgroups(input_symmetry=symmetry,
                                              max_delta=3)
                    top_group = groups.result_groups[0]
                    best_sg = str(groups.lattice_group_info()).split('(')[0]
                    best_uc = top_group['best_subsym'].unit_cell().parameters()
                    # best_sg = str(top_group['best_subsym'].space_group_info())

                    uc_no_stdev = "{:<6.2f} {:<6.2f} {:<6.2f} " \
                                  "{:<6.2f} {:<6.2f} {:<6.2f} " \
                                  "".format(best_uc[0], best_uc[1], best_uc[2],
                                            best_uc[3], best_uc[4], best_uc[5])
                    cluster_info = {
                        'number': len(cluster.members),
                        'pg': best_sg,
                        'uc': uc_no_stdev,
                        'filename': mark_output
                    }
                    self.info.clusters.append(cluster_info)

                    # format and record output
                    # TODO: How to propagate stdevs after conversion from Niggli?
                    # uc_line = "{:<6} {:^4}:  {:<6.2f} ({:>5.2f}), {:<6.2f} ({:>5.2f}), "\
                    #           "{:<6.2f} ({:>5.2f}), {:<6.2f} ({:>5.2f}), "\
                    #           "{:<6.2f} ({:>5.2f}), {:<6.2f} ({:>5.2f})   "\
                    #           "{}".format('({})'.format(len(cluster.members)), cons_pg[0],
                    #                                 cluster.medians[0], cluster.stdevs[0],
                    #                                 cluster.medians[1], cluster.stdevs[1],
                    #                                 cluster.medians[2], cluster.stdevs[2],
                    #                                 cluster.medians[3], cluster.stdevs[3],
                    #                                 cluster.medians[4], cluster.stdevs[4],
                    #                                 cluster.medians[5], cluster.stdevs[5],
                    #                                 mark_output)
                    # uc_table.append(uc_line)
                    uc_table.append("{:<6}:  {} {}".format(
                        len(cluster.members), uc_no_stdev, mark_output))
                    lattices = ', '.join(
                        ['{} ({})'.format(i[0], i[1]) for i in sorted_pg_comp])
                    # uc_info = [len(cluster.members), cons_pg[0], cluster.medians,
                    #            output_file, uc_line, lattices]
                    uc_info = [
                        len(cluster.members), best_sg, best_uc, output_file,
                        uc_no_stdev, lattices
                    ]
                    uc_summary.append(uc_info)

            else:
                # generate average unit cell
                uc_table.append("\n\n{:-^80}\n" \
                                "".format(' UNIT CELL AVERAGING (no clustering) '))
                uc_a, uc_b, uc_c, uc_alpha, \
                uc_beta, uc_gamma, uc_sg = list(zip(*self.info.cluster_iterable))
                cons_pg = Counter(uc_sg).most_common(1)[0][0]
                all_pgs = Counter(uc_sg).most_common()
                unit_cell = (np.median(uc_a), np.median(uc_b), np.median(uc_c),
                             np.median(uc_alpha), np.median(uc_beta),
                             np.median(uc_gamma))

                # Populate clustering info for GUI display
                uc_init = uctbx.unit_cell(unit_cell)
                symmetry = crystal.symmetry(unit_cell=uc_init,
                                            space_group_symbol='P1')
                groups = metric_subgroups(input_symmetry=symmetry, max_delta=3)
                top_group = groups.result_groups[0]
                best_sg = str(groups.lattice_group_info()).split('(')[0]
                best_uc = top_group['best_subsym'].unit_cell().parameters()
                # best_sg = str(top_group['best_subsym'].space_group_info())

                uc_no_stdev = "{:<6.2f} {:<6.2f} {:<6.2f} " \
                              "{:<6.2f} {:<6.2f} {:<6.2f} " \
                              "".format(best_uc[0], best_uc[1], best_uc[2],
                                        best_uc[3], best_uc[4], best_uc[5])
                cluster_info = {
                    'number': len(self.info.cluster_iterable),
                    'pg': best_sg,
                    'uc': uc_no_stdev,
                    'filename': None
                }
                self.info.clusters.append(cluster_info)

                # uc_line = "{:<6} {:^4}:  {:<6.2f} ({:>5.2f}), {:<6.2f} ({:>5.2f}), " \
                #           "{:<6.2f} ({:>5.2f}), {:<6.2f} ({:>5.2f}), " \
                #           "{:<6.2f} ({:>5.2f}), {:<6.2f} ({:>5.2f})   " \
                #           "{}".format('({})'.format(len(self.final_objects)), cons_pg,
                #                       np.median(uc_a), np.std(uc_a),
                #                       np.median(uc_b), np.std(uc_b),
                #                       np.median(uc_c), np.std(uc_c),
                #                       np.median(uc_alpha), np.std(uc_alpha),
                #                       np.median(uc_beta), np.std(uc_beta),
                #                       np.median(uc_gamma), np.std(uc_gamma), '')
                #
                # uc_table.append(uc_line)
                uc_table.append(uc_no_stdev)
                lattices = ', '.join(
                    ['{} ({})'.format(i[0], i[1]) for i in all_pgs])
                # uc_info = [len(self.final_objects), cons_pg, unit_cell, None,
                #            uc_line, lattices]
                uc_info = [
                    len(self.info.cluster_iterable), best_sg, best_uc, None,
                    uc_no_stdev, lattices
                ]
                uc_summary.append(uc_info)

            uc_table.append('\nMost common unit cell:\n')

            # select the most prevalent unit cell (most members in cluster)
            uc_freqs = [i[0] for i in uc_summary]
            uc_pick = uc_summary[np.argmax(uc_freqs)]
            uc_table.append(uc_pick[4])
            uc_table.append('\nBravais Lattices in Biggest Cluster: {}'
                            ''.format(uc_pick[5]))
            self.info.best_pg = str(uc_pick[1])
            self.info.best_uc = uc_pick[2]

            if uc_pick[3] is not None:
                self.prime_data_path = uc_pick[3]

            for item in uc_table:
                util.main_log(self.info.logfile, item, False)
            self.info.update(uc_table=uc_table)

            if self.gui_mode:
                return self.info.clusters
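
The no-clustering branch above reduces all integrated results to a median unit cell plus a consensus point group via `Counter`. The same reduction as a standalone sketch with hypothetical numbers:

import numpy as np
from collections import Counter

# Hypothetical (a, b, c, alpha, beta, gamma, point_group) results
results = [
    (78.1, 78.3, 37.2, 90.0, 90.0, 90.0, 'P4'),
    (78.0, 78.2, 37.1, 90.0, 90.0, 90.0, 'P4'),
    (77.9, 78.4, 37.3, 90.0, 90.0, 90.0, 'P1'),
]
uc_a, uc_b, uc_c, uc_al, uc_be, uc_ga, uc_sg = zip(*results)
median_cell = tuple(np.median(x) for x in
                    (uc_a, uc_b, uc_c, uc_al, uc_be, uc_ga))
consensus_pg = Counter(uc_sg).most_common(1)[0][0]
print(median_cell, consensus_pg)  # (78.0, 78.3, 37.2, 90.0, 90.0, 90.0) P4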
Example #10
def initialize_processing(paramfile, run_no):
  ''' Initialize processing for a set of images
  :param paramfile: text file with IOTA parameters
  :param run_no: number of the processing run
  :return: info: INFO object
           params: IOTA params
  '''
  try:
    phil, _ = inp.get_input_phil(paramfile=paramfile)
  except Exception as e:
    msg = 'IOTA_PROC_ERROR: Cannot import IOTA parameters! {}'.format(e)
    return False, msg
  else:
    params = phil.extract()

  # Reconstruct integration base path and get info object
  int_base = os.path.join(params.output, 'integration/{:03d}'.format(run_no))
  try:
    info_file = os.path.join(int_base, 'proc.info')
    info = ProcInfo.from_json(filepath=info_file)
  except Exception as e:
    msg = 'IOTA_PROC_ERROR: Cannot import INFO object! {}'.format(e)
    return False, msg

  # Generate input list and input base
  if not hasattr(info, 'input_list'):
    info.generate_input_list(params=params)
  common_pfx = os.path.abspath(
    os.path.dirname(os.path.commonprefix(info.input_list)))

  input_base = common_pfx
  if os.path.isdir(os.path.abspath(params.input[0])):
    new_common_pfx = os.path.commonprefix([os.path.abspath(params.input[0]), common_pfx])
    if new_common_pfx not in ('', '.'):
      input_base = new_common_pfx

  # Generate subfolder paths
  paths = dict(obj_base=os.path.join(int_base, 'image_objects'),
               fin_base=os.path.join(int_base, 'final'),
               log_base=os.path.join(int_base, 'logs'),
               viz_base=os.path.join(int_base, 'visualization'),
               tmp_base=os.path.join(int_base, 'tmp'),
               input_base=input_base)
  # FIXME: ordering of items in dictionaries changes depending on python version
  for bkey, bvalue in paths.items():
    if bkey == "input_base":  # I think this is what the original code wanted to do
      continue
    if not os.path.isdir(bvalue):
      os.makedirs(bvalue)
  info.update(paths)

  # Generate filepaths for various info files
  info_files = dict(
    obj_list_file=os.path.join(info.tmp_base, 'finished_objects.lst'),
    idx_file=os.path.join(info.int_base, 'observations.pickle')
  )
  info.update(info_files)

  # Initialize stat containers
  info = generate_stat_containers(info=info, params=params)

  # Initialize main log
  util.main_log(info.logfile, '{:*^80} \n'.format(' IOTA MAIN LOG '))
  util.main_log(info.logfile, '{:-^80} \n'.format(' SETTINGS FOR THIS RUN '))
  util.main_log(info.logfile, info.iota_phil)
  util.main_log(info.logfile, '{:-^80} \n'.format('BACKEND SETTINGS'))
  util.main_log(info.logfile, info.target_phil)

  info.export_json()

  return info, params
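
A side note on the directory-creation loop above: on Python 3 the isdir check can be folded into makedirs with exist_ok (an optional simplification, shown with a hypothetical paths dict):

import os

paths = {'obj_base': '/tmp/int/image_objects',  # hypothetical
         'input_base': '/data/run_01'}
for bkey, bvalue in paths.items():
    if bkey == 'input_base':  # input_base points at the data; don't create it
        continue
    os.makedirs(bvalue, exist_ok=True)  # no-op if the folder already exists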
Example #11
  def print_summary(self, write_files=True):
    """ Prints summary and appends to general log file. Also outputs some of it
        on stdout. Also writes out output list files.
    """

    summary = []

    util.main_log(self.logfile,
                  "\n\n{:-^80}\n".format('SUMMARY'),
                  (not self.gui_mode))

    summary.append('raw images read in:                  {}'\
                   ''.format(len(self.all_objects)))
    summary.append('raw images with no diffraction:      {}'\
                   ''.format(len(self.no_diff_objects)))
    summary.append('raw images with diffraction:         {}'\
                   ''.format(len(self.diff_objects)))
    if self.params.advanced.processing_backend == 'ha14':
      summary.append('failed indexing / integration:       {}'\
                     ''.format(len(self.not_int_objects)))
      summary.append('failed prefilter:                    {}'\
                     ''.format(len(self.filter_fail_objects)))
    elif self.params.advanced.processing_backend == 'cctbx.xfel':
      summary.append('failed spotfinding:                  {}'\
                     ''.format(len(self.not_spf_objects)))
      summary.append('failed indexing:                     {}'\
                     ''.format(len(self.not_idx_objects)))
      summary.append('failed filter:                       {}'
                     ''.format(len(self.filter_fail_objects)))
      summary.append('failed integration:                  {}'\
                     ''.format(len(self.not_int_objects)))
    summary.append('final integrated pickles:            {}'\
                   ''.format(len(self.sorted_final_images)))

    for item in summary:
      util.main_log(self.logfile, "{}".format(item), (not self.gui_mode))

    util.main_log(self.logfile, '\n\nIOTA version {0}'.format(self.ver))
    util.main_log(self.logfile, "{}\n".format(self.now))


    # Write list files:
    if write_files:

      input_list_file = os.path.join(self.output_dir, 'input_images.lst')
      blank_images_file = os.path.join(self.output_dir, 'blank_images.lst')
      prefilter_fail_file = os.path.join(self.output_dir, 'failed_cctbx_prefilter.lst')
      spotfinding_fail_file = os.path.join(self.output_dir, 'failed_dials_spotfinding.lst')
      indexing_fail_file = os.path.join(self.output_dir, 'failed_dials_indexing.lst')
      not_integrated_file = os.path.join(self.output_dir, 'not_integrated.lst')
      integrated_file = os.path.join(self.output_dir, 'integrated.lst')
      int_images_file = os.path.join(self.output_dir, 'int_image_pickles.lst')
      analyzer_file = os.path.join(self.output_dir, 'analysis.pickle')

      if self.prime_data_path is None:
        self.prime_data_path = integrated_file

      if len(self.no_diff_objects) > 0:
        with open(blank_images_file, 'w') as bif:
          for obj in self.no_diff_objects:
            bif.write('{}\n'.format(obj.img_path))

      if len(self.diff_objects) > 0:
        with open(input_list_file, 'w') as ilf:
          for obj in self.diff_objects:
            ilf.write('{}\n'.format(obj.img_path))

      if len(self.not_int_objects) > 0:
        with open(not_integrated_file, 'w') as nif:
          for obj in self.not_int_objects:
            nif.write('{}\n'.format(obj.img_path))

      if len(self.filter_fail_objects) > 0:
        with open(prefilter_fail_file, 'w') as pff:
          for obj in self.filter_fail_objects:
            pff.write('{}\n'.format(obj.img_path))

      if self.params.advanced.processing_backend == 'cctbx.xfel':
        if len(self.not_spf_objects) > 0:
          with open(spotfinding_fail_file, 'w') as sff:
            for obj in self.not_spf_objects:
              sff.write('{}\n'.format(obj.img_path))
        if len(self.not_idx_objects) > 0:
          with open(indexing_fail_file, 'w') as iff:
            for obj in self.not_idx_objects:
              iff.write('{}\n'.format(obj.img_path))

      if len(self.final_objects) > 0:
        with open(integrated_file, 'w') as intf:
          for obj in self.sorted_final_images:
            intf.write('{}\n'.format(obj.final['final']))
        with open(int_images_file, 'w') as ipf:
          for obj in self.sorted_final_images:
            ipf.write('{}\n'.format(obj.final['img']))

      # Dump the Analyzer object into a pickle file for later fast recovery
      ep.dump(analyzer_file, self.analysis_result)
Example #12
  def print_results(self, final_table=None):
    """ Prints diagnostics from the final integration run. """

    cons_s = Counter(self.s).most_common(1)[0][0]
    cons_h = Counter(self.h).most_common(1)[0][0]
    cons_a = Counter(self.a).most_common(1)[0][0]

    if final_table is None:
      final_table = ["\n\n{:-^80}\n".format('ANALYSIS OF RESULTS')]

      # In case no images were integrated
      if self.final_objects is None:
        final_table.append('NO IMAGES INTEGRATED!')
      else:
        if self.params.advanced.processing_backend == 'ha14':
          final_table.append("Avg. signal height:    {:<8.3f}  std. dev:   "
                             "{:<6.2f}  max: {:<3}  min: {:<3}  consensus: {:<3}"
                             "".format(np.mean(self.s), np.std(self.s),
                                       max(self.s), min(self.s), cons_s))
          final_table.append("Avg. spot height:      {:<8.3f}  std. dev:   "
                             "{:<6.2f}  max: {:<3}  min: {:<3}  consensus: {:<3}"
                             "".format(np.mean(self.h), np.std(self.h),
                                       max(self.h), min(self.h), cons_h))
          final_table.append("Avg. spot areas:       {:<8.3f}  std. dev:   "
                             "{:<6.2f}  max: {:<3}  min: {:<3}  consensus: {:<3}"
                            "".format(np.mean(self.a), np.std(self.a),
                                      max(self.a), min(self.a), cons_a))
        final_table.append("Avg. resolution:       {:<8.3f}  std. dev:   "
                           "{:<6.2f}  lowest: {:<6.3f}  highest: {:<6.3f}"
                          "".format(np.mean(self.hres), np.std(self.hres),
                                    max(self.hres), min(self.hres)))
        final_table.append("Avg. number of spots:  {:<8.3f}  std. dev:   {:<6.2f}"
                          "".format(np.mean(self.spots), np.std(self.spots)))
        final_table.append("Avg. mosaicity:        {:<8.3f}  std. dev:   {:<6.2f}"
                          "".format(np.mean(self.mos), np.std(self.mos)))

        # If more than one integrated image, plot various summary graphs
        if len(self.final_objects) > 1:
          plot = Plotter(self.params, self.final_objects, self.viz_dir)
          if self.params.analysis.summary_graphs:
            if ( self.params.advanced.processing_backend == 'ha14' and
                    self.params.cctbx_ha14.grid_search.type is not None
            ):
              plot.plot_spotfinding_heatmap(write_files=True)
            plot.plot_res_histogram(write_files=True)
            med_beamX, med_beamY, pixel_size = plot.plot_beam_xy(write_files=True,
                                                               return_values=True)
          else:
            with warnings.catch_warnings():
              # To catch any 'mean of empty slice' runtime warnings
              warnings.simplefilter("ignore", category=RuntimeWarning)
              beamXY_info = plot.calculate_beam_xy()
              beamX, beamY = beamXY_info[:2]
              med_beamX = np.median(beamX)
              med_beamY = np.median(beamY)
              pixel_size = beamXY_info[-1]

          final_table.append("Median Beam Center:    X = {:<4.2f}, Y = {:<4.2f}"
                             "".format(med_beamX, med_beamY))
          self.analysis_result.beamX_mm = med_beamX
          self.analysis_result.beamY_mm = med_beamY
          self.analysis_result.pixel_size = pixel_size
      self.analysis_result.final_table = final_table


    for item in final_table:
      util.main_log(self.logfile, item, (not self.gui_mode))
Example #13
class Processor(object):
    """ Class for image integration (w/ grid search params) """
    def __init__(self,
                 params,
                 source_image=None,
                 output_image=None,
                 viz=None,
                 log=None,
                 tag='grid search',
                 tmp_base=None,
                 gain=1,
                 single_image=False):

        self.params = params
        self.img = source_image
        self.out_img = output_image
        self.min_sigma = self.params.cctbx_ha14.selection.min_sigma
        self.target = os.path.abspath(self.params.cctbx_ha14.target)
        self.viz = viz
        self.tag = tag
        self.int_log = log
        self.charts = self.params.analysis.charts
        self.tmp_base = tmp_base
        self.single_image = single_image
        self.method = self.params.mp.method
        self.queue = self.params.mp.queue

        self.args = [
            "target={}".format(self.target),
            "indexing.data={}".format(self.img), "spots_pickle=None",
            "subgroups_pickle=None", "refinements_pickle=None",
            "rmsd_tolerance=5.0", "mosflm_rmsd_tolerance=5.0",
            "integration.detector_gain={}".format(gain),
            "indexing.verbose_cv=True"
        ]

        # Add target unit cell if exists
        if self.params.cctbx_ha14.target_unit_cell is not None:
            t_uc = [
                str(i)
                for i in self.params.cctbx_ha14.target_unit_cell.parameters()
            ]
            self.args.extend(['target_cell="{}"'.format(' '.join(t_uc))])

        # Translate / add target lattice if exists
        t_lat = util.makenone(self.params.cctbx_ha14.target_lattice_type)
        if t_lat:
            known_settings = {
                'triclinic': 1,
                'monoclinic': 2,
                'orthorhombic': 5,
                'rhombohedral': 5,
                'tetragonal': 9,
                'hexagonal': 12,
                'cubic': 22,
            }
            known_setting = known_settings.get(t_lat)

            if known_setting:
                self.args.extend(['known_setting={}'.format(known_setting)])

        # Centering type if exists
        t_ctype = self.params.cctbx_ha14.target_centering_type
        if t_ctype is not None:
            self.args.extend(['target_cell_centring_type={}'.format(t_ctype)])

        # Resolution, if exists
        hires = self.params.cctbx_ha14.resolution_limits.high
        lowres = self.params.cctbx_ha14.resolution_limits.low
        if hires is None:
            hires = 1.5
        if lowres is None:
            lowres = 99.9
        self.args.extend([
            'force_method2_resolution_limit={}'.format(hires),
            'distl_lowres_limit={}'.format(lowres),
            'distl_highres_limit={}'.format(hires),
            'distl.res.inner={}'.format(lowres),
            'distl.res.outer={}'.format(hires)
        ])

        # Add gain (necessary now)
        self.args.extend(['integration.detector_gain={}'.format(gain)])

        with open(os.path.join(self.params.output, 'test.txt'), 'w') as tf:
            tf.write('\n'.join(self.args))

    def integrate(self, grid_point):
        """ Runs the integration module in cctbx.xfel; used by either grid-search or
        final integration function. """

        self.s = grid_point['sih']
        self.h = grid_point['sph']
        self.a = grid_point['spa']

        args = self.args

        # Generate advanced arguments (and PDF subfolder)
        if self.charts and self.tag == 'grid search':
            filename = os.path.basename(self.img).split('.')[0]
            pdf_folder = os.path.join(self.viz, 'pdf_{}/s{}_h{}_a{}'\
                         ''.format(filename, self.s, self.h, self.a))
            if not os.path.exists(pdf_folder):
                os.makedirs(pdf_folder)
            self.args.extend([
                "integration.enable_residual_map=True",
                "integration.enable_residual_scatter=True",
                "integration.mosaic.enable_AD14F7B=True",
                "integration.graphics_backend=pdf",
                "integration.pdf_output_dir={}".format(pdf_folder)
            ])
        if self.tag == 'integrate':
            args.append("indexing.completeness_pickle={}".format(self.out_img))

        # Actually run integration using iota.bulletproof
        error_message = ''
        with util.Capturing() as index_log:
            arguments = [
                "distl.minimum_signal_height={}".format(self.s),
                "distl.minimum_spot_height={}".format(self.h),
                "distl.minimum_spot_area={}".format(self.a),
                "indexing.open_wx_viewer=False"
            ] + list(args[1:])

            tmppath = os.path.join(self.tmp_base,
                                   str(uuid.uuid4()) + ".pickle")
            assert not os.path.exists(tmppath)

            # invoke the indexer in a way that will protect iota from any crashes
            command = "iota.bulletproof {} {} {}" \
                      "".format(tmppath, self.target, " ".join(arguments))
            try:
                easy_run.fully_buffered(command,
                                        join_stdout_stderr=True).show_stdout()
                if not os.path.exists(tmppath):
                    print(tmppath)
                    print(command)
                    raise Exception("Indexing failed for an unknown reason")

                # iota.bulletproof saves the needed results from indexing in a tmp file
                result = easy_pickle.load(tmppath)
                os.remove(tmppath)
                if isinstance(result, str):
                    raise Exception(result)
                else:
                    int_final = result

            except Exception as e:
                int_final = None
                if hasattr(e, "classname"):
                    print(
                        e.classname,
                        "for %s:" % self.img,
                    )
                    error_message = "{}: {}".format(
                        e.classname, e[0].replace('\n', ' ')[:50])
                else:
                    print("Integration error for %s:" % self.img, )
                    error_message = "{}".format(str(e).replace('\n', ' ')[:50])
                print(e)

        # Output results of integration (from the "info" object returned by
        # run_one_index_core)
        if int_final is None:
            if error_message != '':
                reason_for_failure = " - {}".format(error_message)
            else:
                reason_for_failure = ''
            int_status = 'not integrated' + reason_for_failure
            int_results = {'info': int_status}
        elif int_final['observations'][0] is None:
            int_status = 'no data recorded'
            int_results = {'info': int_status}
        else:
            try:
                obs = int_final['observations'][0]
                cell = obs.unit_cell().parameters()
                sg = int_final['pointgroup']
                lres, hres = obs.d_max_min()

                # Calculate number of spots w/ high I / sigmaI
                Is = obs.data()
                sigmas = obs.sigmas()
                I_over_sigI = Is / sigmas
                #spots = len(Is)
                strong_spots = len(
                    [i for i in I_over_sigI if i >= self.min_sigma])

                # Mosaicity parameters
                mosaicity = round(
                    (int_final.get('ML_half_mosaicity_deg', [0])[0]), 6)
                dom_size = int_final.get('ML_domain_size_ang', [0])[0]
                ewald_proximal_volume = int_final.get('ewald_proximal_volume',
                                                      [0])[0]

                # Assemble output for log file and/or integration result file
                p_cell = "{:>6.2f}, {:>6.2f}, {:>6.2f}, {:>6.2f}, {:>6.2f}, {:>6.2f}"\
                       "".format(cell[0], cell[1], cell[2], cell[3], cell[4], cell[5])

                int_status = 'RES: {:<4.2f}  NSREF: {:<4}  SG: {:<5}  CELL: {}'\
                             ''.format(hres, strong_spots, sg, p_cell)

                int_results = {
                    'sg': sg,
                    'a': cell[0],
                    'b': cell[1],
                    'c': cell[2],
                    'alpha': cell[3],
                    'beta': cell[4],
                    'gamma': cell[5],
                    'wavelength': int_final['wavelength'],
                    'distance': int_final['distance'],
                    'beamX': int_final['xbeam'],
                    'beamY': int_final['ybeam'],
                    'strong': strong_spots,
                    'res': hres,
                    'lres': lres,
                    'mos': mosaicity,
                    'epv': ewald_proximal_volume,
                    'info': int_status,
                    'ok': True
                }
            except ValueError:
                print(self.img)
                int_status = 'integration failed'
                int_results = {'info': int_status}

        # write integration logfile
        if self.tag == 'integrate':
            util.main_log(
                self.int_log,
                "{:-^100}\n{:-^100}\n{:-^100}\n".format(
                    "", " FINAL INTEGRATION: ",
                    "S = {:>2}, H ={:>2}, A ={:>2} ".format(
                        self.s, self.h, self.a)))
        else:
            util.main_log(
                self.int_log,
                "{:-^100}\n".format(
                    " INTEGRATION: S = {:>2}, H ={:>2}, A ={:>2} ".format(
                        self.s, self.h, self.a)))
        for item in index_log:
            util.main_log(self.int_log, item)

        util.main_log(self.int_log, "\n[ {:^100} ]\n\n".format(int_status))

        # In single-image mode, write a file with h, k, l, I, sigma
        if self.single_image and self.tag == 'integrate' and int_results.get('ok'):
            hklI_filename = "{}.{}".format(
                os.path.basename(self.out_img).split('.')[0], 'hkli')
            hklI_file = os.path.join(os.path.dirname(self.out_img),
                                     hklI_filename)
            hklI = zip(obs.indices(), obs.data(), obs.sigmas())
            # Open once and append all h, k, l, I, sigma rows
            with open(hklI_file, 'a') as f:
                for i in hklI:
                    f.write('{},{},{},{},{}\n'.format(
                        i[0][0], i[0][1], i[0][2], i[1], i[2]))

        return int_results
Example #14
    def process(self, img_object, single_image=False):
        """ Image processing; selects method, runs requisite modules """
        self.img_object = img_object

        if self.img_object.status != 'bypass grid search':
            self.img_object.status = 'processing'

        # For CCTBX indexing / integration
        terminate = False
        prev_status = self.img_object.status
        prev_fail = 'first cycle'
        prev_final = self.img_object.final
        prev_epv = 9999

        while not terminate:
            # Run grid search if haven't already
            if (self.img_object.fail is None
                    and 'grid search' not in self.img_object.status):
                self.integrate_cctbx('grid search', single_image=single_image)

            # Run selection if haven't already
            if (self.img_object.fail is None
                    and self.img_object.status != 'selection'):
                self.select_cctbx()

            # If smart grid search is active run multiple rounds until convergence
            if self.params.cctbx_ha14.grid_search.type == 'smart':
                if (self.img_object.fail is None
                        and self.img_object.final['epv'] < prev_epv):
                    prev_epv = self.img_object.final['epv']
                    prev_final = self.img_object.final
                    prev_status = self.img_object.status
                    prev_fail = self.img_object.fail
                    self.hmed = self.img_object.final['sph']
                    self.amed = self.img_object.final['spa']
                    self.img_object.generate_grid()
                    self.img_object.final['final'] = self.img_object.int_file
                    if len(self.img_object.grid) == 0:
                        self.img_object.final = prev_final
                        self.img_object.status = prev_status
                        self.img_object.fail = prev_fail
                        terminate = True
                        continue
                    if self.verbose:
                        log_entry = '\nNew starting point: H = {}, A = {}\n'\
                                    ''.format(self.hmed, self.amed)
                        self.img_object.log_info.append(log_entry)
                else:
                    if prev_fail != 'first cycle':
                        self.img_object.final = prev_final
                        self.img_object.status = prev_status
                        self.img_object.fail = prev_fail
                        if self.verbose:
                            log_entry = '\nFinal set of parameters: H = {}, A = {}'\
                                        ''.format(self.img_object.final['sph'],
                                                  self.img_object.final['spa'])
                            self.img_object.log_info.append(log_entry)
                    terminate = True

            # If brute force grid search is selected run one round
            else:
                terminate = True

        # Run final integration if haven't already
        if self.img_object.fail is None and self.img_object.status != 'final':
            self.integrate_cctbx('integrate', single_image=single_image)

        # If verbose output selected (default), write to main log
        if self.verbose:
            log_entry = "\n".join(self.img_object.log_info)
            util.main_log(self.init.logfile, log_entry)
            util.main_log(self.init.logfile, '\n\n')

        # Make a temporary process log into a final process log
        if os.path.isfile(self.img_object.int_log):
            l_fname = util.make_filename(self.img_object.int_log,
                                         new_ext='log')
            final_int_log = os.path.join(self.img_object.log_path, l_fname)
            os.rename(self.img_object.int_log, final_int_log)

        # Save results into a pickle file
        self.img_object.status = 'final'
        from libtbx import easy_pickle as ep
        ep.dump(self.img_object.obj_file, self.img_object)

        return self.img_object
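
The smart grid search above is a greedy hill-climbing loop: new spot parameters are kept only while the EPV figure of merit improves, otherwise the previous best is restored and the loop stops. The bare pattern, stripped of IOTA specifics (objective and neighbor functions are placeholders):

def hill_climb(start, objective, neighbors, max_steps=100):
    """ Generic greedy search: accept a neighbor only if it improves. """
    best, best_score = start, objective(start)
    for _ in range(max_steps):
        candidates = neighbors(best)
        if not candidates:
            break
        cand = min(candidates, key=objective)
        score = objective(cand)
        if score >= best_score:
            break  # no improvement: converged
        best, best_score = cand, score
    return best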
Example #15
    def process(self):
        """ Run Process module from command line """

        if self.stage == 'import':  # Import Images
            self.create_image_iterable()

            if self.init.params.cctbx_ha14.selection.select_only.flag_on:
                msg = "Reading {} image objects".format(
                    len(self.init.gs_img_objects))
                title = 'READING IMAGE OBJECTS'
            else:
                msg = "Importing {} images".format(len(self.init.input_list))
                title = 'IMPORTING IMAGES'
            cmd.Command.start(msg)
            self.prog_count = 0
            self.gs_prog = cmd.ProgressBar(title=title)
            self.run_process()

            # Remove rejected images from image object list
            acc_img_objects = [
                i for i in self.img_objects if i.fail is None
            ]
            cmd.Command.end("Accepted {} of {} images -- DONE " \
                            "".format(len(acc_img_objects), len(self.img_objects)))

            # Exit if none of the images have diffraction
            triage_type = str(
                self.init.params.cctbx_ha14.image_triage.type).lower()
            if triage_type != 'none':
                if len(acc_img_objects) == 0:
                    util.main_log(self.init.logfile,
                                  'No images have diffraction!', True)
                    util.iota_exit()
                else:
                    util.main_log(
                        self.init.logfile,
                        "{} out of {} images have diffraction "
                        "(at least {} Bragg peaks)"
                        "".format(
                            len(acc_img_objects), len(self.img_objects),
                            self.init.params.cctbx_ha14
                            .image_triage.min_Bragg_peaks))
                    self.stage = 'process'

            # Check for -c option and exit if true
            if self.init.params.cctbx_ha14.image_conversion.convert_only:
                util.iota_exit()

        # Process Images
        self.create_image_iterable()
        cmd.Command.start("Processing {} images".format(len(self.iterable)))
        self.prog_count = 0
        self.gs_prog = cmd.ProgressBar(title='PROCESSING')
        self.run_process()
        cmd.Command.end("Processing {} images -- DONE".format(
            len(self.iterable)))

        # Analysis of integration results
        final_objects = [i for i in self.img_objects if i.fail is None]
        if len(final_objects) > 0:
            self.run_analysis()

            # Write info object / file (TODO: Revisit this later!)
            from iota.components.iota_threads import ObjectReaderThread
            self.object_reader = ObjectReaderThread(
                self,
                info=self.init.info,
                source=self.img_objects,
                info_file=self.init.info_file)
            self.object_reader.start()

        else:
            print('No images successfully integrated!')

        # Exit IOTA
        util.iota_exit()
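
`cmd.Command` and `cmd.ProgressBar` are IOTA's command-line feedback helpers; only their constructors and start/end calls appear in these examples. A minimal hypothetical stand-in that matches the `ProgressBar(title=...)` call above:

import sys

class ProgressBar(object):
    """ Hypothetical minimal progress bar; only mirrors the calls shown. """
    def __init__(self, title=''):
        self.title = title
    def update(self, done, total):
        pct = 100.0 * done / max(total, 1)
        sys.stdout.write('\r{}: {:5.1f}%'.format(self.title, pct))
        sys.stdout.flush()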
Example #16
File: iota_init.py  Project: dials/iota
def initialize_processing(paramfile, run_no):
    """Initialize processing for a set of images.

    :param paramfile: text file with IOTA parameters
    :param run_no: number of the processing run
    :return: info: INFO object
             params: IOTA params
    """
    try:
        phil, _ = inp.get_input_phil(paramfile=paramfile)
    except Exception as e:
        msg = "IOTA_PROC_ERROR: Cannot import IOTA parameters! {}".format(e)
        return False, msg
    else:
        params = phil.extract()

    # Reconstruct integration base path and get info object
    int_base = os.path.join(params.output, "integration/{:03d}".format(run_no))
    try:
        info_file = os.path.join(int_base, "proc.info")
        info = ProcInfo.from_json(filepath=info_file)
    except Exception as e:
        msg = "IOTA_PROC_ERROR: Cannot import INFO object! {}".format(e)
        return False, msg

    # Generate input list and input base
    if not hasattr(info, "input_list"):
        info.generate_input_list(params=params)
    filepath_list = []
    for item in info.input_list:
        if isinstance(item, (list, tuple)):
            fp = [i for i in item if os.path.exists(str(i))]
            if fp and len(fp) == 1:
                filepath_list.append(fp[0])
        else:
            if os.path.exists(item):
                filepath_list.append(item)
    common_pfx = os.path.abspath(
        os.path.dirname(os.path.commonprefix(filepath_list)))

    input_base = common_pfx
    if os.path.isdir(os.path.abspath(params.input[0])):
        new_common_pfx = os.path.commonprefix(
            [os.path.abspath(params.input[0]), common_pfx])
        if new_common_pfx not in ("", "."):
            input_base = new_common_pfx

    # Generate subfolder paths
    paths = dict(
        obj_base=os.path.join(int_base, "image_objects"),
        fin_base=os.path.join(int_base, "final"),
        log_base=os.path.join(int_base, "logs"),
        dials_log_base=os.path.join(int_base, "logs/dials_logs"),
        viz_base=os.path.join(int_base, "visualization"),
        tmp_base=os.path.join(int_base, "tmp"),
        input_base=input_base,
    )
    for bkey, bvalue in paths.items():
        if bkey == "input_base":
            continue
        if not os.path.isdir(bvalue):
            os.makedirs(bvalue)
    info.update(paths)

    # Generate filepaths for various info files
    info_files = dict(
        obj_list_file=os.path.join(info.tmp_base, "finished_objects.lst"),
        idx_file=os.path.join(info.int_base, "observations.pickle"),
    )
    info.update(info_files)

    # Initialize stat containers
    info = generate_stat_containers(info=info, params=params)

    # Initialize main log
    util.main_log(info.logfile, "{:*^80} \n".format(" IOTA MAIN LOG "))
    util.main_log(info.logfile, "{:-^80} \n".format(" SETTINGS FOR THIS RUN "))
    util.main_log(info.logfile, info.iota_phil)
    util.main_log(info.logfile, "{:-^80} \n".format("BACKEND SETTINGS"))
    util.main_log(info.logfile, info.target_phil)

    info.export_json()

    return info, params
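
`os.path.commonprefix` compares strings character by character rather than path components, so the result can end mid-name; passing it through `os.path.dirname`, as above, snaps it back to a real directory. A small illustration:

import os

paths = ['/data/run_01/img_0001.cbf', '/data/run_02/img_0001.cbf']
print(os.path.commonprefix(paths))                   # '/data/run_0' (not a dir)
print(os.path.dirname(os.path.commonprefix(paths)))  # '/data'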