def generate_html_report(self, test_list):
        """Writes the beamline test summary as an html file and archives a
           timestamped copy of it next to the original.

        :param test_list: list of executed tests (currently unused; the
                          report is built from self.results_list)
        :type test_list: list
        """
        html_filename = os.path.join(self.test_directory,
                                     self.test_filename)
        archive_filename = os.path.join(
            self.test_directory,
            datetime.now().strftime("%Y_%m_%d_%H") + "_" + self.test_filename)

        try:
            # "with" guarantees the file is closed even if a write fails
            # (the original leaked the handle on error)
            with open(html_filename, "w") as output_file:
                output_file.write(SimpleHTML.create_html_start("Beamline test summary"))
                output_file.write("<h1>Beamline %s Test results</h1>" % self.beamline_name)

                output_file.write("<h2>Executed tests:</h2>")
                table_cells = []
                for test in self.results_list:
                    # first cell is a bgcolor attribute consumed by
                    # SimpleHTML.create_table, not a visible column
                    table_cells.append(
                        ["bgcolor=%s" % TEST_COLORS_TABLE[test["result_bit"]],
                         "<a href=#%s>%s</a>" % (test["short_name"], test["full_name"]),
                         test["result_short"],
                         test["start_time"],
                         test["end_time"]])

                table_rec = SimpleHTML.create_table(
                    ["Name", "Result", "Start time", "End time"],
                    table_cells)
                for row in table_rec:
                    output_file.write(row)
                output_file.write("\n<hr>\n")

                for test_result in self.results_html_list:
                    output_file.write(test_result + "\n")

                output_file.write(SimpleHTML.create_html_end())

            self.emit("htmlGenerated", html_filename)
            logging.getLogger("HWR").info(
               "BeamlineTest: Test result written in file %s" % html_filename)
        except Exception:
            # was a bare "except:"; narrowed so KeyboardInterrupt/SystemExit
            # still propagate
            logging.getLogger("HWR").error(
               "BeamlineTest: Unable to generate html report file %s" % html_filename)

        try:
            # copy the freshly written report to the timestamped archive name
            with open(html_filename, "r") as output_file, \
                 open(archive_filename, "w") as archive_file:
                for line in output_file.readlines():
                    archive_file.write(line)

            logging.getLogger("HWR").info("Archive file :%s generated" % archive_filename)
        except Exception:
            logging.getLogger("HWR").error(
              "BeamlineTest: Unable to generate html report file %s" % archive_filename)
    def test_summary(self):
        """Reads all configured tine properties and renders their current
           values as an html table.

        :returns: dict with "result_bit" (always True) and
                  "result_details" (html table rows)
        """
        result = {"result_bit": True, "result_details": []}
        table_cells = []

        for tine_prop in self['tine_props']:
            device = tine_prop.getProperty("prop_device")
            # NOTE(review): eval on a configuration string — assumed to be a
            # trusted beamline config value; confirm it cannot carry user input.
            prop_names = eval(tine_prop.getProperty("prop_names"))
            if isinstance(prop_names, str):
                # single property configured as a plain string
                table_cells.append(
                    [device, prop_names, str(tine.get(device, prop_names))])
            else:
                for index, property_name in enumerate(prop_names):
                    # only the first row of a device displays its name
                    device_label = device if index == 0 else ""
                    table_cells.append(
                        [device_label,
                         property_name,
                         str(tine.get(device, property_name))])

        result["result_details"] = SimpleHTML.create_table(
             ["Context/Server/Device", "Property", "Value"],
             table_cells)
        self.ready_event.set()
        return result
    def test_focusing(self):
        """Detects the active focusing mode and builds a per-motor table of
           expected/actual positions for every available focus mode.

        :returns: dict with "result_bit", "result_short" and
                  "result_details" (html table rows)
        """
        result = {"result_details": []}

        active_mode, beam_size = self.get_focus_mode()
        if active_mode is None:
            result["result_bit"] = False
            result["result_short"] = "No focusing mode detected"
        else:
            result["result_bit"] = True
            result["result_short"] = "%s mode detected" % active_mode

        focus_modes = self.get_focus_mode_names()
        focus_motors_list = self.get_focus_motors()

        table_cells = []
        if focus_motors_list:
            for motor in focus_motors_list:
                table_row = [motor['motorName']]
                for focus_mode in focus_modes:
                    # green/red cell depending on whether the motor supports
                    # this focus mode
                    mode_matches = focus_mode in motor['focMode']
                    cell = "<td bgcolor=%s>%.3f/%.3f</td>" % (
                         TEST_COLORS_TABLE[mode_matches],
                         motor['focusingModes'][focus_mode],
                         motor['position'])
                    table_row.append(cell)
                table_cells.append(table_row)

        focus_modes = ["Motors"] + list(focus_modes)
        result["result_details"] = SimpleHTML.create_table(
              focus_modes, table_cells)
        self.ready_event.set()
        return result
    def test_com(self):
        """Pings every device from self.devices_list and reports which
           ones replied.

        :returns: dict with "result_bit", "result_short" and
                  "result_details" (html table rows)
        """
        result = {}
        table_header = ["Replied", "DNS name", "IP address", "Location",
                        "MAC address", "Details"]
        table_cells = []
        failed_count = 0
        for row, device in enumerate(self.devices_list):
            msg = "Pinging %s at %s" % (device[0], device[1])
            logging.getLogger("HWR").debug("BeamlineTest: %s" % msg)
            # default row marks the device as "not replied" (red cell)
            device_result = ["bgcolor=#FFCCCC", "False"] + device
            ping_result = False
            try:
                # NOTE(review): device[1] is interpolated into a shell
                # command; it comes from the beamline device list and is
                # assumed trusted — confirm it cannot contain user input
                # (otherwise switch to subprocess.run with a list).
                ping_result = os.system("ping -W 2 -c 2 " + device[1]) == 0
                device_result[0] = "bgcolor=%s" % TEST_COLORS_TABLE[ping_result]
                device_result[1] = str(ping_result)
            except Exception:
                # was a bare "except:"; narrowed to Exception
                ping_result = False
            table_cells.append(device_result)

            if not ping_result:
                failed_count += 1
            progress_info = {"progress_total": len(self.devices_list),
                             "progress_msg": msg}
            self.emit("testProgress", (row, progress_info))

        result["result_details"] = SimpleHTML.create_table(table_header, table_cells)

        if failed_count == 0:
            result["result_short"] = "Test passed (got reply from all devices)"
            result["result_bit"] = True
        else:
            result["result_short"] = "Test failed (%d devices from %d did not replied)" % \
                  (failed_count, len(self.devices_list))
            result["result_bit"] = False
        self.ready_event.set()
        return result
    def set_processing_status(self, status):
        """Sets processing status and finalizes the processing: emits result
           signals, stores workflow/results in ISPyB for mesh scans, and
           saves heat-map / line-plot figures.
           Method called from EDNA via xmlrpc.

        :param status: processing status (Success, Failed)
        :type status: str
        """
        log = logging.getLogger("HWR")
        self.params_dict["status"] = status

        if status == "Failed":
            self.emit("processingFailed")
        else:
            self.emit("processingFinished")

        self.emit("paralleProcessingResults",
                  (self.processing_results_align,
                   self.params_dict,
                   True))

        # Processing finished. Results are aligned and 10 best positions
        # estimated
        self.params_dict["processing_programs"] = "EDNAdozor"
        self.params_dict["processing_end_time"] = \
            time.strftime("%Y-%m-%d %H:%M:%S")
        self.params_dict["max_dozor_score"] = \
            self.processing_results_align["score"].max()
        best_positions = self.processing_results_align.get("best_positions", [])

        # We store MeshScan and XrayCentring workflow in ISPyB
        # Parallel processing is also executed for all osc that have
        # more than 20 images, but results are not stored as workflow

        fig, ax = plt.subplots(nrows=1, ncols=1)
        if self.params_dict["lines_num"] > 1:
            log.info("Saving autoprocessing program in ISPyB")
            self.lims_hwobj.store_autoproc_program(self.params_dict)

            log.info("Saving processing results in ISPyB")
            workflow_id, workflow_mesh_id, grid_info_id = \
                 self.lims_hwobj.store_workflow(self.params_dict)
            self.params_dict["workflow_id"] = workflow_id
            self.params_dict["workflow_mesh_id"] = workflow_mesh_id
            self.params_dict["grid_info_id"] = grid_info_id

            self.collect_hwobj.update_lims_with_workflow(workflow_id,
                 self.params_dict["grid_snapshot_filename"])

            # assigned outside the try so the except branch can always
            # reference it in its log message (it could be unbound before)
            html_filename = os.path.join(self.params_dict["result_file_path"],
                                         "index.html")
            try:
                log.info("Generating results html %s" % html_filename)
                SimpleHTML.generate_mesh_scan_report(
                    self.processing_results_align, self.params_dict,
                    html_filename)
            except Exception:
                # was a bare "except:"; narrowed to Exception
                log.exception("Could not create result html %s" % html_filename)

            # Heat map generation
            # If mesh scan then a 2D plot
            im = ax.imshow(self.processing_results_align["score"],
                           interpolation='none', aspect='auto',
                           extent=[0, self.processing_results_align["score"].shape[1], 0,
                                   self.processing_results_align["score"].shape[0]])
            if len(best_positions) > 0:
                # mark the best position with cross-hair lines
                plt.axvline(x=best_positions[0]["col"] - 0.5, linewidth=0.5)
                plt.axhline(y=best_positions[0]["row"] - 0.5, linewidth=0.5)

            divider = make_axes_locatable(ax)
            cax = divider.append_axes("right", size=0.1, pad=0.05)
            cax.tick_params(axis='x', labelsize=8)
            cax.tick_params(axis='y', labelsize=8)
            plt.colorbar(im, cax=cax)
            im.set_cmap('hot')
        else:
            # if helical line then a line plot
            plt.plot(self.processing_results_align["score"],
                     label="Total score",
                     color="r")
            plt.plot(self.processing_results_align["spots_num"],
                     label="Number of spots",
                     linestyle="None",
                     color="b",
                     marker="o")
            plt.plot(self.processing_results_align["spots_int_aver"],
                     label="Int aver",
                     linestyle="None",
                     color="g",
                     marker="s")
            plt.plot(self.processing_results_align["spots_resolution"],
                     linestyle="None",
                     label="Resolution",
                     color="m",
                     marker="s")
            plt.legend()
            ylim = ax.get_ylim()
            ax.set_ylim((-1, ylim[1]))

        ax.tick_params(axis='x', labelsize=8)
        ax.tick_params(axis='y', labelsize=8)
        ax.set_title(self.params_dict["title"], fontsize=8)

        ax.grid(True)
        ax.spines['left'].set_position(('outward', 10))
        ax.spines['bottom'].set_position(('outward', 10))

        processing_plot_file = os.path.join(
             self.params_dict["directory"], "parallel_processing_result.png")
        processing_plot_archive_file = os.path.join(
             self.params_dict["processing_archive_directory"],
             "parallel_processing_result.png")

        try:
            log.info("Saving heat map figure %s" % \
                processing_plot_file)
            if not os.path.exists(os.path.dirname(processing_plot_file)):
                os.makedirs(os.path.dirname(processing_plot_file))
            fig.savefig(processing_plot_file, dpi=150, bbox_inches='tight')
        except Exception:
            log.exception("Could not save figure %s" % \
                processing_plot_file)
        try:
            log.info("Saving heat map figure for ISPyB %s" % \
                processing_plot_archive_file)
            if not os.path.exists(os.path.dirname(processing_plot_archive_file)):
                os.makedirs(os.path.dirname(processing_plot_archive_file))
            fig.savefig(processing_plot_archive_file, dpi=150, bbox_inches='tight')
        except Exception:
            log.exception("Could not save figure for ISPyB %s" % \
                processing_plot_archive_file)
        # release the figure so repeated calls do not accumulate memory
        plt.close(fig)
        self.done_event.set()
    def do_processing_result_polling(self, processing_params, wait_timeout, grid_object):
        """
        Descript. : Method polls processing results. Based on the polling of
                    edna result files. After each result file results are
                    aligned to match diffractometer configuration
                    If processing do not fail (files appear before timeout)
                    heat map is created and results are stored in ispyb.
                    If processing was executed for helical line then heat map
                    as a line plot is generated and best positions are estimated.
                    If processing was executed for a grid then 2d plot is generated,
                    best positions are estimated and stored in ispyb. Also mesh
                    parameters and processing results as a workflow are stored
                    in ispyb.
        Args.     : processing_params (dict with processing parameters)
                    wait_timeout (file waiting timeout is sec.)
                    grid_object (grid used to align results)
        Return.   : list of 10 best positions. If processing fails returns None
        """
        processing_result = {
            "image_num": numpy.zeros(processing_params["images_num"]),
            "spots_num": numpy.zeros(processing_params["images_num"]),
            "spots_int_aver": numpy.zeros(processing_params["images_num"]),
            "spots_resolution": numpy.zeros(processing_params["images_num"]),
            "score": numpy.zeros(processing_params["images_num"]),
        }

        processing_params["status"] = "Success"
        failed = False

        do_polling = True
        result_file_index = 0
        _result_place = []
        # wait up to 30 s for the first result directory to appear
        _first_frame_timeout = 5 * 60 / 10
        _time_out = _first_frame_timeout
        _start_time = time.time()

        while _result_place == [] and time.time() - _start_time < _time_out:
            _result_place = glob.glob(os.path.join(processing_params["directory"], "EDApplication*/"))
            gevent.sleep(0.2)
        if _result_place == []:
            msg = "ParallelProcessing: Failed to read dozor result directory %s" % processing_params["directory"]
            logging.error(msg)
            processing_params["status"] = "Failed"
            processing_params["comments"] += "Failed: " + msg
            self.emit("processingFailed")
            failed = True

        while do_polling and not failed:
            file_appeared = False
            result_file_name = os.path.join(_result_place[0], "ResultControlDozor_Chunk_%06d.xml" % result_file_index)
            wait_file_start = time.time()
            logging.debug("ParallelProcessing: Waiting for Dozor result file: %s" % result_file_name)
            while not file_appeared and time.time() - wait_file_start < wait_timeout:
                if os.path.exists(result_file_name) and os.stat(result_file_name).st_size > 0:
                    file_appeared = True
                    _time_out = wait_timeout
                    logging.debug(
                        "ParallelProcessing: Dozor file is there, size={0}".format(os.stat(result_file_name).st_size)
                    )
                else:
                    # force an NFS cache refresh of the result directory
                    os.system("ls %s > /dev/null" % _result_place[0])
                    gevent.sleep(0.2)
            if not file_appeared:
                failed = True
                msg = "ParallelProcessing: Dozor result file ({0}) failed to appear after {1} seconds".format(
                    result_file_name, wait_timeout
                )
                logging.error(msg)
                processing_params["status"] = "Failed"
                processing_params["comments"] += "Failed: " + msg
                self.emit("processingFailed")
                # Bug fix: without this break the code below would call
                # os.stat on the missing file and raise
                break

            # poll while the size increasing:
            _oldsize = -1
            _newsize = 0
            while _oldsize < _newsize:
                _oldsize = _newsize
                _newsize = os.stat(result_file_name).st_size
                gevent.sleep(0.1)

            dozor_output_file = XSDataResultControlDozor.parseFile(result_file_name)
            # this method could be improved with xml parsing
            for dozor_image in dozor_output_file.getImageDozor():
                image_index = dozor_image.getNumber().getValue() - 1
                processing_result["image_num"][image_index] = image_index
                processing_result["spots_num"][image_index] = dozor_image.getSpots_num_of().getValue()
                processing_result["spots_int_aver"][image_index] = dozor_image.getSpots_int_aver().getValue()
                processing_result["spots_resolution"][image_index] = dozor_image.getSpots_resolution().getValue()
                processing_result["score"][image_index] = dozor_image.getScore().getValue()
                # stop polling once the last image number has been processed
                do_polling = dozor_image.getNumber().getValue() != processing_params["images_num"]

            # Bug fix: was "dozor_result" (undefined name) and was missing
            # the grid_object argument used by the final call below
            aligned_result = self.align_processing_results(processing_result, processing_params, grid_object)
            self.emit("processingSetResult", (aligned_result, processing_params, False))
            result_file_index += 1

        self.processing_results = self.align_processing_results(processing_result, processing_params, grid_object)
        self.emit("paralleProcessingResults", (self.processing_results, processing_params, True))

        # Processing finished. Results are aligned and 10 best positions estimated
        processing_params["processing_programs"] = "EDNAdozor"
        processing_params["processing_end_time"] = time.strftime("%Y-%m-%d %H:%M:%S")
        best_positions = self.processing_results.get("best_positions", [])

        # If lims used then and mesh then save results in ispyb
        # Autoprocessin program

        if processing_params["lines_num"] > 1:
            logging.getLogger("HWR").info(
                "ParallelProcessing: Saving information about autoprocessing program in ISPyB..."
            )
            autoproc_program_id = self.lims_hwobj.store_autoproc_program(processing_params)
            logging.getLogger("HWR").info("ParallelProcessing: Done")

            logging.getLogger("HWR").info(
                "ParallelProcessing: Saving processing results as MeshScan workflow in ISPyB..."
            )
            workflow_id, workflow_mesh_id, grid_info_id = self.lims_hwobj.store_workflow(processing_params)
            processing_params["workflow_id"] = workflow_id
            processing_params["workflow_mesh_id"] = workflow_mesh_id
            processing_params["grid_info_id"] = grid_info_id

            self.collect_hwobj.update_lims_with_workflow(workflow_id, processing_params["grid_snapshot_filename"])
            logging.getLogger("HWR").info("ParallelProcessing: Done")

            # If best positions detected then save them in ispyb
            if len(best_positions) > 0:
                logging.getLogger("HWR").info(
                    "ParallelProcessing: Saving %d best positions in ISPyB..." % len(best_positions)
                )

                motor_pos_id_list = []
                image_id_list = []
                for image in best_positions:
                    # Motor position is stored
                    motor_pos_id = self.lims_hwobj.store_centred_position(image["cpos"], image["col"], image["row"])
                    # Corresponding image is stored
                    image_id = self.collect_hwobj.process_image(image["index"], motor_pos_id)
                    # Image quality indicators are stored
                    image["image_id"] = image_id
                    image["auto_proc_program"] = autoproc_program_id

                    self.lims_hwobj.store_image_quality_indicators(image)

                    motor_pos_id_list.append(motor_pos_id)
                    image_id_list.append(image_id)

                processing_params["best_position_id"] = motor_pos_id_list[0]
                processing_params["best_image_id"] = image_id_list[0]
                logging.getLogger("HWR").info("ParallelProcessing: Done")

                logging.getLogger("HWR").info(
                    "ParallelProcessing: Updating best position of MeshScan workflow in ISPyB..."
                )
                self.lims_hwobj.store_workflow(processing_params)
                logging.getLogger("HWR").info("ParallelProcessing: Done")
            else:
                logging.getLogger("HWR").info("ParallelProcessing: No best positions found during the scan")

            # assigned outside the try so the except branch can always
            # reference it in its log message
            html_filename = os.path.join(processing_params["result_file_path"], "index.html")
            try:
                logging.getLogger("HWR").info("ParallelProcessing: Generating results html %s" % html_filename)
                # Bug fix: was "simpleHtml" (undefined; the module is
                # referenced as SimpleHTML everywhere else in this file)
                SimpleHTML.generate_mesh_scan_report(self.processing_results, processing_params, html_filename)
            except Exception:
                # was a bare "except:"; narrowed to Exception
                logging.getLogger("HWR").exception(
                    "ParallelProcessing: Could not create result html %s" % html_filename
                )

        # Heat map generation
        fig, ax = plt.subplots(nrows=1, ncols=1)
        if processing_params["lines_num"] > 1:
            # If mesh scan then a 2D plot
            im = ax.imshow(self.processing_results["score"], interpolation="none", aspect="auto")
            if len(best_positions) > 0:
                plt.axvline(x=best_positions[0]["col"] - 1, linewidth=0.5)
                plt.axhline(y=best_positions[0]["row"] - 1, linewidth=0.5)

            divider = make_axes_locatable(ax)
            cax = divider.append_axes("right", size=0.1, pad=0.05)
            cax.tick_params(axis="x", labelsize=8)
            cax.tick_params(axis="y", labelsize=8)
            plt.colorbar(im, cax=cax)
            im.set_cmap("hot")
        else:
            # if helical line then a line plot
            plt.plot(self.processing_results["score"])
            ylim = ax.get_ylim()
            ax.set_ylim((-1, ylim[1]))

        ax.tick_params(axis="x", labelsize=8)
        ax.tick_params(axis="y", labelsize=8)
        ax.set_title(processing_params["title"], fontsize=8)

        ax.grid(True)
        ax.spines["left"].set_position(("outward", 10))
        ax.spines["bottom"].set_position(("outward", 10))

        processing_plot_file = os.path.join(processing_params["directory"], "parallel_processing_result.png")
        processing_plot_archive_file = os.path.join(
            processing_params["processing_archive_directory"], "parallel_processing_result.png"
        )

        try:
            logging.getLogger("HWR").info("ParallelProcessing: Saving heat map figure %s" % processing_plot_file)
            if not os.path.exists(os.path.dirname(processing_plot_file)):
                os.makedirs(os.path.dirname(processing_plot_file))
            fig.savefig(processing_plot_file, dpi=150, bbox_inches="tight")
        except Exception:
            logging.getLogger("HWR").exception("ParallelProcessing: Could not save figure %s" % processing_plot_file)
        try:
            logging.getLogger("HWR").info(
                "ParallelProcessing: Saving heat map figure for ISPyB %s" % processing_plot_archive_file
            )
            if not os.path.exists(os.path.dirname(processing_plot_archive_file)):
                os.makedirs(os.path.dirname(processing_plot_archive_file))
            fig.savefig(processing_plot_archive_file, dpi=150, bbox_inches="tight")
        except Exception:
            logging.getLogger("HWR").exception(
                "ParallelProcessing: Could not save figure for ISPyB %s" % processing_plot_archive_file
            )
        # release the figure so repeated polling runs do not accumulate memory
        plt.close(fig)
        self.processing_done_event.set()
    def store_processing_results(self, status):
        """Stores result plots. In the case of MeshScan and XrayCentering
           html is created and results saved in ISPyB

        :param status: status type
        :type status: str
        """
        log = logging.getLogger("HWR")

        self.started = False
        self.params_dict["status"] = status

        # ---------------------------------------------------------------------
        # 1. Assembling all file names
        self.params_dict["processing_programs"] = "EDNAdozor"
        self.params_dict["processing_end_time"] = time.strftime("%Y-%m-%d %H:%M:%S")
        self.params_dict["max_dozor_score"] = self.results_aligned["score"].max()
        best_positions = self.results_aligned.get("best_positions", [])

        processing_plot_file = os.path.join(
            self.params_dict["directory"], "parallel_processing_result.png"
        )
        processing_grid_overlay_file = os.path.join(
            self.params_dict["directory"], "grid_overlay.png"
        )
        processing_plot_archive_file = os.path.join(
            self.params_dict["processing_archive_directory"],
            "parallel_processing_result.png",
        )
        processing_csv_archive_file = os.path.join(
            self.params_dict["processing_archive_directory"],
            "parallel_processing_result.csv",
        )

        # If MeshScan and XrayCentring then info is stored in ISPyB
        if self.params_dict["workflow_type"] in ("MeshScan", "XrayCentering"):
            log.info("Parallel processing: Saving results in ISPyB...")
            self.lims_hwobj.store_autoproc_program(self.params_dict)
            if self.data_collection.workflow_id is not None:
                self.params_dict["workflow_id"] = self.data_collection.workflow_id

            workflow_id, workflow_mesh_id, grid_info_id = self.lims_hwobj.store_workflow(
                self.params_dict
            )

            self.params_dict["workflow_id"] = workflow_id
            self.params_dict["workflow_mesh_id"] = workflow_mesh_id
            self.params_dict["grid_info_id"] = grid_info_id
            self.data_collection.workflow_id = workflow_id

            self.collect_hwobj.update_lims_with_workflow(
                workflow_id, self.params_dict["grid_snapshot_filename"]
            )
            self.lims_hwobj.store_workflow_step(self.params_dict)

            # self.lims_hwobj.set_image_quality_indicators_plot(
            #     self.collect_hwobj.collection_id,
            #     processing_plot_archive_file,
            #     processing_csv_archive_file)

            if len(best_positions) > 0:
                self.collect_hwobj.store_image_in_lims_by_frame_num(
                    best_positions[0]["index"]
                )
            log.info("Parallel processing: Results saved in ISPyB")

            try:
                html_filename = os.path.join(
                    self.params_dict["result_file_path"], "index.html"
                )
                SimpleHTML.generate_mesh_scan_report(
                    self.results_aligned, self.params_dict, html_filename
                )
                log.info("Parallel processing: Results html saved %s" % html_filename)
            except BaseException:
                log.exception(
                    "Parallel processing: Could not save results html %s"
                    % html_filename
                )

        fig, ax = plt.subplots(nrows=1, ncols=1)
        if self.params_dict["lines_num"] > 1:
            current_max = max(fig.get_size_inches())
            grid_width = self.params_dict["steps_x"] * self.params_dict["xOffset"]
            grid_height = self.params_dict["steps_y"] * self.params_dict["yOffset"]

            if grid_width > grid_height:
                fig.set_size_inches(current_max, current_max * grid_height / grid_width)
            else:
                fig.set_size_inches(current_max * grid_width / grid_height, current_max)

            im = ax.imshow(
                numpy.transpose(self.results_aligned["score"]),
                interpolation="none",
                aspect="auto",
                extent=[
                    0,
                    self.results_aligned["score"].shape[0],
                    0,
                    self.results_aligned["score"].shape[1],
                ],
            )
            im.set_cmap("hot")

            try:
                if not os.path.exists(os.path.dirname(processing_grid_overlay_file)):
                    os.makedirs(os.path.dirname(processing_grid_overlay_file))

                plt.imsave(
                    processing_grid_overlay_file,
                    numpy.transpose(self.results_aligned["score"]),
                    format="png",
                    cmap="hot",
                )
                self.grid.set_overlay_pixmap(processing_grid_overlay_file)
                log.info(
                    "Parallel processing: Grid overlay figure saved %s"
                    % processing_grid_overlay_file
                )
            except BaseException:
                log.exception(
                    "Parallel processing: Could not save grid overlay figure %s"
                    % processing_grid_overlay_file
                )

            if len(best_positions) > 0:
                plt.axvline(x=best_positions[0]["col"], linewidth=0.5)
                plt.axhline(y=best_positions[0]["row"], linewidth=0.5)

                divider = make_axes_locatable(ax)
                cax = divider.append_axes("right", size=0.1, pad=0.05)
                cax.tick_params(axis="x", labelsize=8)
                cax.tick_params(axis="y", labelsize=8)
                plt.colorbar(im, cax=cax)
        else:
            max_resolution = self.params_dict["resolution"]
            min_resolution = self.results_aligned["spots_resolution"].max()

            # TODO plot results based on the result_name_list
            max_score = self.results_aligned["score"].max()
            if max_score == 0:
                max_score = 1
            max_spots_num = self.results_aligned["spots_num"].max()
            if max_spots_num == 0:
                max_spots_num = 1

            plt.plot(
                self.results_aligned["score"] / max_score, ".", label="Score", c="r"
            )
            plt.plot(
                self.results_aligned["spots_num"] / max_spots_num,
                ".",
                label="Number of spots",
                c="b",
            )
            plt.plot(
                self.results_aligned["spots_resolution"], ".", label="Resolution", c="y"
            )

            ax.legend(
                loc="lower center",
                fancybox=True,
                numpoints=1,
                borderaxespad=0.0,
                bbox_to_anchor=(0.5, -0.13),
                ncol=3,
            )
            ax.set_ylim(-0.01, 1.1)

            positions = numpy.linspace(
                0, self.results_aligned["spots_resolution"].max(), 5
            )
            labels = ["inf"]
            for item in positions[1:]:
                labels.append("%.2f" % (1.0 / item))
            ax.set_yticks(positions)
            ax.set_yticklabels(labels)

            # new_labels = numpy.linspace(min_resolution, max_resolution / 1.2, len(ax.get_yticklabels()))
            # new_labels = numpy.round(new_labels, 1)
            # ax.set_yticklabels(new_labels)
            ax.set_ylabel("Resolution")

            ay1 = ax.twinx()
            new_labels = numpy.linspace(
                0,
                self.results_aligned["spots_num"].max(),
                len(ay1.get_yticklabels()),
                dtype=numpy.int16,
            )
            ay1.set_yticklabels(new_labels)
            ay1.set_ylabel("Number of spots")

        ax.tick_params(axis="x", labelsize=8)
        ax.tick_params(axis="y", labelsize=8)
        ax.set_title(self.params_dict["title"], fontsize=8)

        ax.grid(True)
        ax.spines["left"].set_position(("outward", 10))
        ax.spines["bottom"].set_position(("outward", 10))

        self.lims_hwobj.set_image_quality_indicators_plot(
            self.collect_hwobj.collection_id,
            processing_plot_archive_file,
            processing_csv_archive_file,
        )

        try:
            if not os.path.exists(os.path.dirname(processing_plot_file)):
                os.makedirs(os.path.dirname(processing_plot_file))
            fig.savefig(processing_plot_file, dpi=150, bbox_inches="tight")
            log.info(
                "Parallel processing: Heat map figure %s saved" % processing_plot_file
            )
        except BaseException:
            log.exception(
                "Parallel processing: Could not save figure %s" % processing_plot_file
            )
        try:
            if not os.path.exists(os.path.dirname(processing_plot_archive_file)):
                os.makedirs(os.path.dirname(processing_plot_archive_file))
            fig.savefig(processing_plot_archive_file, dpi=150, bbox_inches="tight")
            log.info(
                "Parallel processing: Archive heat map figure %s saved"
                % processing_plot_archive_file
            )
        except BaseException:
            log.exception(
                "Parallel processing: Could not save archive figure %s"
                % processing_plot_archive_file
            )

        plt.close(fig)

        # Writes results in the csv file
        """
    def do_processing_result_polling(self, processing_params, wait_timeout, grid):
        """Method polls processing results. Based on the polling of edna 
           result files. After each result file results are aligned to match 
           the diffractometer configuration.
           If processing succed (files appear before timeout) then a heat map 
           is created and results are stored in ispyb.
           If processing was executed for helical line then heat map as a 
           line plot is generated and best positions are estimated.
           If processing was executed for a grid then 2d plot is generated,
           best positions are estimated and stored in ispyb. Also mesh
           parameters and processing results as a workflow are stored in ispyb.
        Args.     : wait_timeout (file waiting timeout is sec.)
        Return.   : list of 10 best positions. If processing fails returns None 
        """
        processing_result = {"image_num" : numpy.zeros(processing_params["images_num"]),
                        "spots_num" : numpy.zeros(processing_params["images_num"]),
                        "spots_int_aver" : numpy.zeros(processing_params["images_num"]),
                        "spots_resolution" : numpy.zeros(processing_params["images_num"]),
                        "score" : numpy.zeros(processing_params["images_num"])}

        processing_params["status"] = "Success"
        failed = False

        """
        do_polling = True
        result_file_index = 0
        _result_place = []
        _first_frame_timout = 5 * 60 / 10
        _time_out = _first_frame_timout
        _start_time = time.time()
       
        while _result_place == [] and time.time() - _start_time < _time_out :
           _result_place = glob.glob(os.path.join(\
                 processing_params["directory"],"EDApplication*/"))
           gevent.sleep(0.2)
        if _result_place == [] : 
           msg = "ParallelProcessing: Failed to read dozor result " + \
                 "rectory %s" % processing_params["directory"]
           logging.error(msg)
           processing_params["status"] = "Failed"
           processing_params["comments"] += "Failed: " + msg
           self.emit("processingFailed")
           self.processing_done_event.set()
           failed = True

        while do_polling and not failed:
            file_appeared = False
            result_file_name = os.path.join(_result_place[0], 
                  "ResultControlDozor_Chunk_%06d.xml" % result_file_index)
            wait_file_start = time.time()
            logging.debug("ParallelProcessing: Waiting for Dozor result " +\
                          "file: %s" % result_file_name)
            while not file_appeared and time.time() - wait_file_start < wait_timeout:
                if os.path.exists(result_file_name) and os.stat(result_file_name).st_size > 0:
                    file_appeared = True
                    _time_out = wait_timeout
                    logging.debug("ParallelProcessing: Dozor file is there," +\
                                  " size={0}".format(os.stat(result_file_name).st_size))
                else:
                    os.system("ls %s > /dev/null" %_result_place[0])
                    gevent.sleep(0.4)
            if not file_appeared:
                failed = True
                msg = "ParallelProcessing: Dozor result file ({0}) " +\
                      "failed to appear after {1} seconds".\
                      format(result_file_name, wait_timeout)
                logging.error(msg)
                processing_params["status"] = "Failed"
                processing_params["comments"] += "Failed: " + msg
                self.emit("processingFailed")
                self.processing_done_event.set()
                return
                
            # poll while the size increasing:
            _oldsize = -1 
            _newsize =  0
            while _oldsize < _newsize :
                _oldsize = _newsize
                _newsize = os.stat(result_file_name).st_size
                gevent.sleep(0.3)
                                
            dozor_output_file = XSDataResultControlDozor.parseFile(result_file_name)
            #this method could be improved with xml parsing
            for dozor_image in dozor_output_file.getImageDozor():
                image_index = dozor_image.getNumber().getValue() - 1
                processing_result["image_num"][image_index] = image_index
                processing_result["spots_num"][image_index] = \
                      dozor_image.getSpots_num_of().getValue()
                processing_result["spots_int_aver"][image_index] = \
                      dozor_image.getSpots_int_aver().getValue()   
                processing_result["spots_resolution"][image_index] = \
                      dozor_image.getSpots_resolution().getValue()
                processing_result["score"][image_index] = dozor_image.getScore().getValue()
                image_index += 1
                do_polling = (dozor_image.getNumber().getValue() != \
                      processing_params["images_num"])

            aligned_result = self.align_processing_results(\
			    processing_result, processing_params, grid)
            self.emit("paralleProcessingResults", (aligned_result, processing_params, False))
            result_file_index += 1

        """
        gevent.sleep(10)
        #This is for test...

        for key in processing_result.keys():
            processing_result[key] = numpy.linspace(0, 
                 processing_params["images_num"], 
                 processing_params["images_num"]).astype('uint8')
        #processing_result['score'][20] = 10

        self.processing_results = self.align_processing_results(\
             processing_result, processing_params, grid)

        self.emit("paralleProcessingResults", (self.processing_results,
                                               processing_params,
                                               True)) 

        #Processing finished. Results are aligned and 10 best positions estimated
        processing_params["processing_programs"] = "EDNAdozor"
        processing_params["processing_end_time"] = time.strftime("%Y-%m-%d %H:%M:%S")
        processing_params["max_dozor_score"] = self.processing_results["score"].max()
        best_positions = self.processing_results.get("best_positions", []) 
 
        #If lims used then and mesh then save results in ispyb
        #Autoprocessin program

        if processing_params["lines_num"] > 1:
            logging.getLogger("HWR").info("ParallelProcessing: Saving autoprocessing program in ISPyB")
            autoproc_program_id = self.lims_hwobj.store_autoproc_program(processing_params)             

            logging.getLogger("HWR").info("ParallelProcessing: Saving processing results in ISPyB")
            workflow_id, workflow_mesh_id, grid_info_id = \
                 self.lims_hwobj.store_workflow(processing_params)
            processing_params["workflow_id"] = workflow_id
            processing_params["workflow_mesh_id"] = workflow_mesh_id
            processing_params["grid_info_id"] = grid_info_id

            self.collect_hwobj.update_lims_with_workflow(workflow_id, 
                 processing_params["grid_snapshot_filename"])

            try:
                html_filename = os.path.join(processing_params["result_file_path"], "index.html")
                logging.getLogger("HWR").info("ParallelProcessing: Generating" +\
                        " results html %s" % html_filename)
                simpleHtml.generate_mesh_scan_report(self.processing_results, 
                                                     processing_params, 
                                                     html_filename)
            except:
                logging.getLogger("HWR").exception("ParallelProcessing: " +\
                        "Could not create result html %s" % html_filename)

        # Heat map generation
        fig, ax = plt.subplots(nrows=1, ncols=1 )
        if processing_params["lines_num"] > 1: 
            #If mesh scan then a 2D plot
            im = ax.imshow(self.processing_results["score"], 
                           interpolation = 'none', aspect='auto',
                           extent = [0, self.processing_results["score"].shape[1], 0, 
                                     self.processing_results["score"].shape[0]])
            if len(best_positions) > 0:
                plt.axvline(x = best_positions[0]["col"] - 0.5, linewidth=0.5)
                plt.axhline(y = best_positions[0]["row"] - 0.5, linewidth=0.5)

            divider = make_axes_locatable(ax)
            cax = divider.append_axes("right", size=0.1, pad=0.05)
            cax.tick_params(axis='x', labelsize=8)
            cax.tick_params(axis='y', labelsize=8)
            plt.colorbar(im, cax=cax)
            im.set_cmap('hot')
        else:
            #if helical line then a line plot
            plt.plot(self.processing_results["score"])
            ylim = ax.get_ylim()
            ax.set_ylim((-1, ylim[1]))

        ax.tick_params(axis='x', labelsize=8)
        ax.tick_params(axis='y', labelsize=8)
        ax.set_title(processing_params["title"], fontsize=8)

        ax.grid(True)
        ax.spines['left'].set_position(('outward', 10))
        ax.spines['bottom'].set_position(('outward', 10))

        processing_plot_file = os.path.join(processing_params\
             ["directory"], "parallel_processing_result.png")
        processing_plot_archive_file = os.path.join(processing_params\
             ["processing_archive_directory"], "parallel_processing_result.png")

        try:
            logging.getLogger("HWR").info("ParallelProcessing: Saving "+\
                                          "heat map figure %s" % \
                                          processing_plot_file)
            if not os.path.exists(os.path.dirname(processing_plot_file)):
                os.makedirs(os.path.dirname(processing_plot_file))
            fig.savefig(processing_plot_file, dpi = 150, bbox_inches = 'tight')
        except:
            logging.getLogger("HWR").exception("ParallelProcessing: Could " +\
                    "not save figure %s" % processing_plot_file)
        try:
            logging.getLogger("HWR").info("ParallelProcessing: Saving heat " +\
                    "map figure for ISPyB %s" % processing_plot_archive_file)
            if not os.path.exists(os.path.dirname(processing_plot_archive_file)):
                os.makedirs(os.path.dirname(processing_plot_archive_file))
            fig.savefig(processing_plot_archive_file, dpi = 150, bbox_inches = 'tight')
        except:
            logging.getLogger("HWR").exception("ParallelProcessing: Could " +\
                    "not save figure for ISPyB %s" % processing_plot_archive_file) 
        plt.close(fig)
        self.processing_done_event.set()
    def set_processing_status(self, status):
        """Sets processing status and finalize the processing
           Method called from EDNA via xmlrpc

        :param status: processing status (Success, Failed)
        :type status: str
        """
        self.done_event.set()

        self.emit("paralleProcessingResults",
                  (self.results_aligned,
                   self.params_dict,
                   False))
        if status == "Failed":
            self.emit("processingFailed")
        else:
            self.emit("processingFinished")

        self.started = False
        log = logging.getLogger("HWR")
        self.params_dict["status"] = status

        # --------------------------------------------------------------------- 
        # 1. Assembling all file names
        self.params_dict["processing_programs"] = "EDNAdozor"
        self.params_dict["processing_end_time"] = \
            time.strftime("%Y-%m-%d %H:%M:%S")
        self.params_dict["max_dozor_score"] = \
            self.results_aligned["score"].max()
        best_positions = self.results_aligned.get("best_positions", [])

        processing_plot_file = os.path.join(self.params_dict\
             ["directory"], "parallel_processing_result.png")
        processing_grid_overlay_file = os.path.join(self.params_dict\
             ["directory"], "grid_overlay.png")
        processing_plot_archive_file = os.path.join(self.params_dict\
             ["processing_archive_directory"], "parallel_processing_result.png")
        processing_csv_archive_file = os.path.join(self.params_dict\
             ["processing_archive_directory"], "parallel_processing_result.csv")

        # We store MeshScan and XrayCentring workflow in ISPyB
        # Parallel processing is also executed for all osc that have
        # more than 20 images, but results are not stored as workflow

        fig, ax = plt.subplots(nrows=1, ncols=1)
 
        if self.params_dict["lines_num"] > 1:
            # -----------------------------------------------------------------
            # 2. Storing results in ISPyB
            log.info("Processing: Saving processing results in ISPyB")
            self.lims_hwobj.store_autoproc_program(self.params_dict)
            workflow_id, workflow_mesh_id, grid_info_id = \
                 self.lims_hwobj.store_workflow(self.params_dict)
            self.params_dict["workflow_id"] = workflow_id
            self.params_dict["workflow_mesh_id"] = workflow_mesh_id
            self.params_dict["grid_info_id"] = grid_info_id

            self.collect_hwobj.update_lims_with_workflow(\
                 workflow_id,
                 self.params_dict["grid_snapshot_filename"])

            self.lims_hwobj.store_workflow_step(self.params_dict)

            #TODO Get collection id from collection object
            self.lims_hwobj.set_image_quality_indicators_plot(\
                 self.collect_hwobj.collection_id,
                 processing_plot_archive_file,
                 processing_csv_archive_file)

            # --------------------------------------------------------------------- 
            # 3. If there are frames with score, then generate map for the best one
            if len(best_positions) > 0:
                self.collect_hwobj.store_image_in_lims_by_frame_num(best_positions[0]["index"])

            try:
                html_filename = os.path.join(self.params_dict["result_file_path"],
                                             "index.html")
                log.info("Processing: Generating results html %s" % html_filename)
                SimpleHTML.generate_mesh_scan_report(\
                    self.results_aligned, self.params_dict,
                    html_filename)
            except:
                log.exception("Processing: Could not create result html %s" % html_filename)

            current_max = max(fig.get_size_inches()) 
            grid_width = self.params_dict["steps_x"] * \
                         self.params_dict["xOffset"]
            grid_height = self.params_dict["steps_y"] * \
                          self.params_dict["yOffset"]

            if grid_width > grid_height:
                fig.set_size_inches(current_max,
                                    current_max * \
                                    grid_height / \
                                    grid_width)
            else:
                fig.set_size_inches(current_max * \
                                    grid_width / \
                                    grid_height,
                                    current_max)
            # Heat map generation
            # If mesh scan then a 2D plot
            im = ax.imshow(numpy.transpose(self.results_aligned["score"]),
                           interpolation='none', aspect='auto',
                           extent=[0, self.results_aligned["score"].shape[0], 0,
                                   self.results_aligned["score"].shape[1]])
            im.set_cmap('hot')

            try:
                log.info("Processing: Saving heat map figure for grid overlay %s" % \
                    processing_grid_overlay_file)
                if not os.path.exists(os.path.dirname(processing_grid_overlay_file)):
                    os.makedirs(os.path.dirname(processing_grid_overlay_file))

                #fig.savefig(processing_grid_overlay_file, dpi=150, transparent=True)
                
                plt.imsave(processing_grid_overlay_file,
                           numpy.transpose(self.results_aligned["score"]),
                           format="png",
                           cmap="hot")
                self.grid.set_overlay_pixmap(processing_grid_overlay_file)
            except:
                log.exception("Processing: Could not save figure for ISPyB %s" % \
                    processing_grid_overlay_file)
            
            if len(best_positions) > 0:
                plt.axvline(x=best_positions[0]["col"], linewidth=0.5)
                plt.axhline(y=best_positions[0]["row"], linewidth=0.5)

            divider = make_axes_locatable(ax)
            cax = divider.append_axes("right", size=0.1, pad=0.05)
            cax.tick_params(axis='x', labelsize=8)
            cax.tick_params(axis='y', labelsize=8)
            plt.colorbar(im, cax=cax)
        else:
            #if helical line then a line plot
            plt.plot(self.results_aligned["score"],
                     label="Total score",
                     color="r")
            plt.plot(self.results_aligned["spots_num"],
                     label="Number of spots",
                     linestyle="None",
                     color="b",
                     marker="o")
            plt.plot(self.results_aligned["spots_int_aver"],
                     label="Int aver",
                     linestyle="None",
                     color="g",
                     marker="s")
            plt.plot(self.results_aligned["spots_resolution"],
                     linestyle="None",
                     label="Resolution",
                     color="m",
                     marker="s")
            plt.legend()
            ylim = ax.get_ylim()
            ax.set_ylim((-1, ylim[1]))

        ax.tick_params(axis='x', labelsize=8)
        ax.tick_params(axis='y', labelsize=8)
        ax.set_title(self.params_dict["title"], fontsize=8)

        ax.grid(True)
        ax.spines['left'].set_position(('outward', 10))
        ax.spines['bottom'].set_position(('outward', 10))

        try:
            log.info("Processing: Saving heat map figure %s" % \
                processing_plot_file)
            if not os.path.exists(os.path.dirname(processing_plot_file)):
                os.makedirs(os.path.dirname(processing_plot_file))
            fig.savefig(processing_plot_file, dpi=150, bbox_inches='tight')
        except:
            log.exception("Processing: Could not save figure %s" % \
                processing_plot_file)
        try:
            log.info("Processing: Saving heat map figure for ISPyB %s" % \
                processing_plot_archive_file)
            if not os.path.exists(os.path.dirname(processing_plot_archive_file)):
                os.makedirs(os.path.dirname(processing_plot_archive_file))
            fig.savefig(processing_plot_archive_file, dpi=150, bbox_inches='tight')
        except:
            log.exception("Processing: Could not save figure for ISPyB %s" % \
                processing_plot_archive_file)

        plt.close(fig)

        log.info("Processing: Results saved in ISPyB")
    def test_measure_intensity(self):
        """
        """
        result = {}
        result["result_bit"] = True
        result["result_details"] = []

        current_phase = self.bl_hwobj.diffractometer_hwobj.current_phase 

        # 1. close guillotine and fast shutter --------------------------------
        self.bl_hwobj.collect_hwobj.close_guillotine(wait=True)
        self.bl_hwobj.fast_shutter_hwobj.closeShutter(wait=True)
        gevent.sleep(0.1)        

        #2. move back light in, check beamstop position -----------------------
        self.bl_hwobj.back_light_hwobj.move_in()

        beamstop_position = self.bl_hwobj.beamstop_hwobj.get_position()
        if beamstop_position == "BEAM":
            self.bl_hwobj.beamstop_hwobj.set_position("OFF") 
            self.bl_hwobj.diffractometer_hwobj.wait_device_ready(30)

        #3. check scintillator position --------------------------------------
        scintillator_position = self.bl_hwobj.\
            diffractometer_hwobj.get_scintillator_position() 
        if scintillator_position == "SCINTILLATOR":
            self.bl_hwobj.diffractometer_hwobj.\
                 set_scintillator_position("PHOTODIODE")
            self.bl_hwobj.diffractometer_hwobj.\
                 wait_device_ready(30)

        #5. open the fast shutter --------------------------------------------
        self.bl_hwobj.fast_shutter_hwobj.openShutter(wait=True)
        gevent.sleep(0.3)

        #6. measure mean intensity
        self.ampl_chan_index = 0

        if True:
            intens_value = self.chan_intens_mean.getValue()  
            intens_range_now = self.chan_intens_range.getValue()
            for intens_range in self.intensity_ranges:
                if intens_range['index'] is intens_range_now:
                    self.intensity_value = intens_value[self.ampl_chan_index] - intens_range['offset']
                    break
        
        #7. close the fast shutter -------------------------------------------
        self.bl_hwobj.fast_shutter_hwobj.closeShutter(wait=True)

        # 7/7 set back original phase ----------------------------------------
        self.bl_hwobj.diffractometer_hwobj.set_phase(current_phase)
        
        #8. Calculate --------------------------------------------------------  
        energy = self.bl_hwobj._get_energy()
        detector_distance = self.bl_hwobj.detector_hwobj.get_distance()
        beam_size = self.bl_hwobj.collect_hwobj.get_beam_size()
        transmission = self.bl_hwobj.transmission_hwobj.getAttFactor()

        result["result_details"].append("Energy: %.4f keV<br>" % energy)
        result["result_details"].append("Detector distance: %.2f mm<br>" % \
              detector_distance)
        result["result_details"].append("Beam size %.2f x %.2f mm<br>" % \
              (beam_size[0], beam_size[1]))
        result["result_details"].append("Transmission %.2f%%<br><br>" % \
              transmission)

        meas_item = [datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
                     "%.4f" % energy,
                     "%.2f" % detector_distance, 
                     "%.2f x %.2f" % (beam_size[0], beam_size[1]), 
                     "%.2f" % transmission]

        air_trsm =  numpy.exp(-self.air_absorption_coeff_per_meter(energy) * \
             detector_distance / 1000.0)
        carb_trsm = self.carbon_window_transmission(energy)
        flux = 0.624151 * 1e16 * self.intensity_value / \
               self.diode_calibration_amp_per_watt(energy) / \
               energy / air_trsm / carb_trsm
        dose_rate = 1e-3 * 1e-14 * self.dose_rate_per_10to14_ph_per_mmsq(energy) * \
               flux / beam_size[0] / beam_size[1]  

        self.bl_hwobj.collect_hwobj.machine_info_hwobj.set_flux(flux)

        msg = "Flux = %1.1e photon/s" % flux
        result["result_details"].append(msg + "<br>")
        logging.getLogger("user_level_log").info(msg)
        result["result_short"] = msg
        meas_item.append("%1.1e" % flux)

        msg = "Dose rate =  %1.1e KGy/s" % dose_rate
        result["result_details"].append(msg + "<br>")
        logging.getLogger("user_level_log").info(msg)
        meas_item.append("%1.1e" % dose_rate)

        msg = "Time to reach 20 MGy = %d s = %d frames " % \
              (20000. / dose_rate, int(25 * 20000. / dose_rate))
        result["result_details"].append(msg + "<br><br>")
        logging.getLogger("user_level_log").info(msg)
        meas_item.append("%1.1e s, %d frames" % \
              (20000. / dose_rate, int(25 * 20000. / dose_rate)))

        self.intensity_measurements.insert(0, meas_item)
        result["result_details"].extend(SimpleHTML.create_table(\
             ["Time", "Energy (keV)", "Detector distance (mm)", "Beam size (mm)",
              "Transmission (%%)", "Flux (photons/s)", "Dose rate (KGy/s)",
              "Time to reach 20 MGy (sec, frames)"], self.intensity_measurements))

        self.ready_event.set()
        return result
# Example #11 (scraped-snippet separator)
# (score: 0)
    def test_measure_intensity(self):
        """Measures intensity and generates report"""
        result = {}
        result["result_bit"] = True
        result["result_details"] = []

        current_phase = self.bl_hwobj.diffractometer_hwobj.current_phase

        # 1. close guillotine and fast shutter -------------------------------
        self.bl_hwobj.collect_hwobj.close_guillotine(wait=True)
        self.bl_hwobj.fast_shutter_hwobj.closeShutter(wait=True)
        gevent.sleep(0.1)

        #2. move back light in, check beamstop position ----------------------
        self.bl_hwobj.back_light_hwobj.move_in()

        beamstop_position = self.bl_hwobj.beamstop_hwobj.get_position()
        if beamstop_position == "BEAM":
            self.bl_hwobj.beamstop_hwobj.set_position("OFF")
            self.bl_hwobj.diffractometer_hwobj.wait_device_ready(30)

        #3. check scintillator position --------------------------------------
        scintillator_position = self.bl_hwobj.\
            diffractometer_hwobj.get_scintillator_position()
        if scintillator_position == "SCINTILLATOR":
            #TODO add state change when scintillator position changed
            self.bl_hwobj.diffractometer_hwobj.\
                 set_scintillator_position("PHOTODIODE")
            gevent.sleep(1)
            self.bl_hwobj.diffractometer_hwobj.\
                 wait_device_ready(30)

        #5. open the fast shutter --------------------------------------------
        self.bl_hwobj.fast_shutter_hwobj.openShutter(wait=True)
        gevent.sleep(0.3)

        #6. measure mean intensity
        self.ampl_chan_index = 0

        if True:
            intens_value = self.chan_intens_mean.getValue()
            intens_range_now = self.chan_intens_range.getValue()
            for intens_range in self.intensity_ranges:
                if intens_range['index'] is intens_range_now:
                    self.intensity_value = intens_value[self.ampl_chan_index] - \
                                           intens_range['offset']
                    break

        #7. close the fast shutter -------------------------------------------
        self.bl_hwobj.fast_shutter_hwobj.closeShutter(wait=True)

        # 7/7 set back original phase ----------------------------------------
        self.bl_hwobj.diffractometer_hwobj.set_phase(current_phase)

        #8. Calculate --------------------------------------------------------
        energy = self.bl_hwobj._get_energy()
        detector_distance = self.bl_hwobj.detector_hwobj.get_distance()
        beam_size = self.bl_hwobj.collect_hwobj.get_beam_size()
        transmission = self.bl_hwobj.transmission_hwobj.getAttFactor()

        result["result_details"].append("Energy: %.4f keV<br>" % energy)
        result["result_details"].append("Detector distance: %.2f mm<br>" % \
                                        detector_distance)
        result["result_details"].append("Beam size %.2f x %.2f mm<br>" % \
                                        (beam_size[0], beam_size[1]))
        result["result_details"].append("Transmission %.2f%%<br><br>" % \
                                        transmission)

        meas_item = [
            datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
            "%.4f" % energy,
            "%.2f" % detector_distance,
            "%.2f x %.2f" % (beam_size[0], beam_size[1]),
            "%.2f" % transmission
        ]

        air_trsm = numpy.exp(-self.air_absorption_coeff_per_meter(energy) * \
             detector_distance / 1000.0)
        carb_trsm = self.carbon_window_transmission(energy)
        flux = 0.624151 * 1e16 * self.intensity_value / \
               self.diode_calibration_amp_per_watt(energy) / \
               energy / air_trsm / carb_trsm

        #GB correcting diode misscalibration!!!
        flux = flux * 1.8

        dose_rate = 1e-3 * 1e-14 * self.dose_rate_per_10to14_ph_per_mmsq(energy) * \
               flux / beam_size[0] / beam_size[1]

        self.bl_hwobj.collect_hwobj.machine_info_hwobj.set_flux(flux)

        msg = "Intensity = %1.1e A" % self.intensity_value
        result["result_details"].append(msg + "<br>")
        logging.getLogger("user_level_log").info(msg)
        result["result_short"] = msg
        meas_item.append("%1.1e" % self.intensity_value)

        msg = "Flux = %1.1e photon/s" % flux
        result["result_details"].append(msg + "<br>")
        logging.getLogger("user_level_log").info(msg)
        result["result_short"] = msg
        meas_item.append("%1.1e" % flux)

        msg = "Dose rate =  %1.1e KGy/s" % dose_rate
        result["result_details"].append(msg + "<br>")
        logging.getLogger("user_level_log").info(msg)
        meas_item.append("%1.1e" % dose_rate)

        msg = "Time to reach 20 MGy = %d s = %d frames " % \
              (20000. / dose_rate, int(25 * 20000. / dose_rate))
        result["result_details"].append(msg + "<br><br>")
        logging.getLogger("user_level_log").info(msg)
        meas_item.append("%1.1e s, %d frames" % \
              (20000. / dose_rate, int(25 * 20000. / dose_rate)))

        self.intensity_measurements.insert(0, meas_item)
        result["result_details"].extend(SimpleHTML.create_table(\
             ["Time", "Energy (keV)", "Detector distance (mm)",
              "Beam size (mm)", "Transmission (%%)", "Intensity (A)",
              "Flux (photons/s)", "Dose rate (KGy/s)",
              "Time to reach 20 MGy (sec, frames)"],
             self.intensity_measurements))

        self.ready_event.set()

        return result
# --- scraped-sample separator ("예제 #12" = "Example #12"); not part of the code ---
    def do_processing_result_polling(self, processing_params, wait_timeout,
                                     grid_object):
        """Polls dozor processing results and publishes aligned results.

           Based on the polling of edna result files. After each result file
           results are aligned to match the diffractometer configuration.
           If processing succeeds (files appear before timeout) then a heat
           map is created and results are stored in ispyb.
           If processing was executed for a helical line then the heat map is
           generated as a line plot and best positions are estimated.
           If processing was executed for a grid then a 2d plot is generated,
           best positions are estimated and stored in ispyb. Also mesh
           parameters and processing results as a workflow are stored in
           ispyb.

        :param processing_params: processing parameters; this method reads
            e.g. "images_num", "lines_num", "directory", "title",
            "result_file_path", "processing_archive_directory" and writes
            "status", "processing_programs", "processing_end_time",
            workflow/grid ids and best position ids
        :type processing_params: dict
        :param wait_timeout: result file waiting timeout in sec.
        :param grid_object: grid used by align_processing_results to map
            frame indices onto the diffractometer grid
        :returns: None. Results are published via the
            "paralleProcessingResults" signal and self.processing_results;
            completion is signalled through self.processing_done_event.
        """
        # Pre-allocate one slot per expected image so frames can be filled
        # in by image index regardless of arrival order.
        processing_result = {
            "image_num": numpy.zeros(processing_params["images_num"]),
            "spots_num": numpy.zeros(processing_params["images_num"]),
            "spots_int_aver": numpy.zeros(processing_params["images_num"]),
            "spots_resolution": numpy.zeros(processing_params["images_num"]),
            "score": numpy.zeros(processing_params["images_num"])
        }

        processing_params["status"] = "Success"
        failed = False
        # NOTE(review): the real EDNA/dozor result-file polling loop below is
        # disabled (left as a bare string literal); the stub further down
        # fills the result arrays with synthetic data instead.
        """
        do_polling = True
        result_file_index = 0
        _result_place = []
        _first_frame_timout = 5 * 60 / 10
        _time_out = _first_frame_timout
        _start_time = time.time()

        while _result_place == [] and time.time() - _start_time < _time_out :
           _result_place = glob.glob(os.path.join(processing_params["directory"],"EDApplication*/"))
           gevent.sleep(0.2)
        if _result_place == [] :
           msg = "ParallelProcessing: Failed to read dozor result directory %s" % processing_params["directory"]
           logging.error(msg)
           processing_params["status"] = "Failed"
           processing_params["comments"] += "Failed: " + msg
           self.emit("processingFailed")
           self.processing_done_event.set()
           failed = True

        while do_polling and not failed:
            file_appeared = False
            result_file_name = os.path.join(_result_place[0],"ResultControlDozor_Chunk_%06d.xml" % result_file_index)
            wait_file_start = time.time()
            logging.debug('ParallelProcessing: Waiting for Dozor result file: %s' % result_file_name)
            while not file_appeared and time.time() - wait_file_start < wait_timeout:
                if os.path.exists(result_file_name) and os.stat(result_file_name).st_size > 0:
                    file_appeared = True
                    _time_out = wait_timeout
                    logging.debug('ParallelProcessing: Dozor file is there, size={0}'.format(os.stat(result_file_name).st_size))
                else:
                    os.system("ls %s > /dev/null" %_result_place[0])
                    gevent.sleep(0.2)
            if not file_appeared:
                failed = True
                msg = 'ParallelProcessing: Dozor result file ({0}) failed to appear after {1} seconds'.\
                      format(result_file_name, wait_timeout)
                logging.error(msg)
                processing_params["status"] = "Failed"
                processing_params["comments"] += "Failed: " + msg
                self.emit("processingFailed")

            # poll while the size increasing:
            _oldsize = -1
            _newsize =  0
            while _oldsize < _newsize :
                _oldsize = _newsize
                _newsize = os.stat(result_file_name).st_size
                gevent.sleep(0.1)

            dozor_output_file = XSDataResultControlDozor.parseFile(result_file_name)
            #this method could be improved with xml parsing
            for dozor_image in dozor_output_file.getImageDozor():
                image_index = dozor_image.getNumber().getValue() - 1
                processing_result["image_num"][image_index] = image_index
                processing_result["spots_num"][image_index] = dozor_image.getSpots_num_of().getValue()
                processing_result["spots_int_aver"][image_index] = dozor_image.getSpots_int_aver().getValue()
                processing_result["spots_resolution"][image_index] = dozor_image.getSpots_resolution().getValue()
                processing_result["score"][image_index] = dozor_image.getScore().getValue()
                image_index += 1
                do_polling = (dozor_image.getNumber().getValue() != processing_params["images_num"])

            aligned_result = self.align_processing_results(dozor_result, processing_params)
            self.emit("processingSetResult", (aligned_result, processing_params, False))
            result_file_index += 1
        """

        gevent.sleep(10)
        # This is for test: fill every channel with a linear ramp so that the
        # alignment / plotting / storage code below can be exercised without
        # real dozor output.
        for key in processing_result.keys():
            processing_result[key] = numpy.linspace(
                0, processing_params["images_num"],
                processing_params["images_num"]).astype('uint8')

        self.processing_results = self.align_processing_results(
            processing_result, processing_params, grid_object)

        # NOTE(review): signal name typo ("paralle...") kept on purpose —
        # listeners connect by this exact string.
        self.emit("paralleProcessingResults",
                  (self.processing_results, processing_params, True))

        # Processing finished. Results are aligned and 10 best positions
        # estimated by align_processing_results.
        processing_params["processing_programs"] = "EDNAdozor"
        processing_params["processing_end_time"] = time.strftime(
            "%Y-%m-%d %H:%M:%S")
        best_positions = self.processing_results.get("best_positions", [])

        # If lims is used and a mesh (more than one line) was scanned then
        # the autoprocessing program, workflow and best positions go to ISPyB.
        if processing_params["lines_num"] > 1:
            logging.getLogger("HWR").info(
                "ParallelProcessing: Saving autoprocessing program in ISPyB")
            autoproc_program_id = self.lims_hwobj.store_autoproc_program(
                processing_params)

            logging.getLogger("HWR").info(
                "ParallelProcessing: Saving processing results in ISPyB")
            workflow_id, workflow_mesh_id, grid_info_id = \
                 self.lims_hwobj.store_workflow(processing_params)
            processing_params["workflow_id"] = workflow_id
            processing_params["workflow_mesh_id"] = workflow_mesh_id
            processing_params["grid_info_id"] = grid_info_id

            self.collect_hwobj.update_lims_with_workflow(
                workflow_id, processing_params["grid_snapshot_filename"])

            # If best positions detected then save them in ispyb
            if len(best_positions) > 0:
                logging.getLogger("HWR").info("ParallelProcessing: Saving %d best positions in ISPyB" % \
                       len(best_positions))

                motor_pos_id_list = []
                image_id_list = []
                for image in best_positions:
                    # Motor position is stored
                    motor_pos_id = self.lims_hwobj.store_centred_position(
                        image["cpos"], image['col'], image['row'])
                    # Corresponding image is stored
                    image_id = self.collect_hwobj.store_image_in_lims_by_frame_num(
                        image['index'], motor_pos_id)
                    # Image quality indicators are stored
                    image["image_id"] = image_id
                    image["auto_proc_program"] = autoproc_program_id

                    self.lims_hwobj.store_image_quality_indicators(image)

                    motor_pos_id_list.append(motor_pos_id)
                    image_id_list.append(image_id)

                # best_positions is ordered best-first, so index 0 is the top hit
                processing_params["best_position_id"] = motor_pos_id_list[0]
                processing_params["best_image_id"] = image_id_list[0]

                logging.getLogger("HWR").info(
                    "ParallelProcessing: Updating best position in ISPyB")
                self.lims_hwobj.store_workflow(processing_params)
            else:
                logging.getLogger("HWR").info(
                    "ParallelProcessing: No best positions found during the scan"
                )

            # Hoisted out of the try block so the except handler can always
            # reference html_filename (previously a failure before assignment
            # would raise NameError inside the handler).
            html_filename = os.path.join(
                processing_params["result_file_path"], "index.html")
            try:
                logging.getLogger("HWR").info(
                    "ParallelProcessing: Generating results html %s" %
                    html_filename)
                # Fix: the module is SimpleHTML (the previous lowercase
                # "simpleHtml" raised NameError, silently swallowed by a
                # bare except).
                SimpleHTML.generate_mesh_scan_report(self.processing_results,
                                                     processing_params,
                                                     html_filename)
            except Exception:
                logging.getLogger("HWR").exception(
                    "ParallelProcessing: Could not create result html %s" %
                    html_filename)

        # Heat map generation
        fig, ax = plt.subplots(nrows=1, ncols=1)
        if processing_params["lines_num"] > 1:
            # If mesh scan then a 2D plot
            im = ax.imshow(self.processing_results["score"],
                           interpolation='none',
                           aspect='auto',
                           extent=[
                               0, self.processing_results["score"].shape[1], 0,
                               self.processing_results["score"].shape[0]
                           ])
            if len(best_positions) > 0:
                # Cross-hair on the best cell; -0.5 centers the line on the
                # cell because imshow extent starts at the cell edge.
                plt.axvline(x=best_positions[0]["col"] - 0.5, linewidth=0.5)
                plt.axhline(y=best_positions[0]["row"] - 0.5, linewidth=0.5)

            divider = make_axes_locatable(ax)
            cax = divider.append_axes("right", size=0.1, pad=0.05)
            cax.tick_params(axis='x', labelsize=8)
            cax.tick_params(axis='y', labelsize=8)
            plt.colorbar(im, cax=cax)
            im.set_cmap('hot')
        else:
            # if helical line then a line plot
            plt.plot(self.processing_results["score"])
            ylim = ax.get_ylim()
            ax.set_ylim((-1, ylim[1]))

        ax.tick_params(axis='x', labelsize=8)
        ax.tick_params(axis='y', labelsize=8)
        ax.set_title(processing_params["title"], fontsize=8)

        ax.grid(True)
        ax.spines['left'].set_position(('outward', 10))
        ax.spines['bottom'].set_position(('outward', 10))

        # Same figure is written twice: once next to the data, once to the
        # archive directory for ISPyB.
        processing_plot_file = os.path.join(processing_params\
             ["directory"], "parallel_processing_result.png")
        processing_plot_archive_file = os.path.join(processing_params\
             ["processing_archive_directory"], "parallel_processing_result.png")

        try:
            logging.getLogger("HWR").info(
                "ParallelProcessing: Saving heat map figure %s" %
                processing_plot_file)
            if not os.path.exists(os.path.dirname(processing_plot_file)):
                os.makedirs(os.path.dirname(processing_plot_file))
            fig.savefig(processing_plot_file, dpi=150, bbox_inches='tight')
        except Exception:
            logging.getLogger("HWR").exception(
                "ParallelProcessing: Could not save figure %s" %
                processing_plot_file)
        try:
            logging.getLogger("HWR").info(
                "ParallelProcessing: Saving heat map figure for ISPyB %s" %
                processing_plot_archive_file)
            if not os.path.exists(
                    os.path.dirname(processing_plot_archive_file)):
                os.makedirs(os.path.dirname(processing_plot_archive_file))
            fig.savefig(processing_plot_archive_file,
                        dpi=150,
                        bbox_inches='tight')
        except Exception:
            logging.getLogger("HWR").exception(
                "ParallelProcessing: Could not save figure for ISPyB %s" %
                processing_plot_archive_file)
        # Close the figure to release matplotlib memory, then unblock waiters.
        plt.close(fig)
        self.processing_done_event.set()
# --- scraped-sample separator ("예제 #13" = "Example #13"); not part of the code ---
    def store_processing_results(self, status: str) -> None:
        """Stores result plots. In the case of MeshScan and XrayCentering
           html is created and results saved in ISPyB.

           Side effects: updates self.params_dict and self.workflow_info,
           stores workflow / image info via HWR.beamline.lims and
           HWR.beamline.collect, writes plot png(s), an html/json report and
           a raw-score csv file.

        :param status: status type, stored into params_dict["status"]
        :type status: str
        """
        log = logging.getLogger("HWR")

        # Mark the processing run as finished and record its final status.
        self.started = False
        self.params_dict["status"] = status

        # ---------------------------------------------------------------------
        # Assembling all file names
        self.params_dict["max_dozor_score"] = self.results_aligned[
            "score"].max()
        best_positions = self.results_aligned.get("best_positions", [])

        processing_grid_overlay_file = os.path.join(
            self.params_dict["archive_directory"], "grid_overlay.png")
        # NOTE(review): archive plot path below is commented out, yet the
        # "plot for ISPyB" block further down re-saves to cartography_path —
        # presumably it was meant to use this archive file; confirm.
        # processing_plot_archive_file = os.path.join(
        #    self.params_dict["archive_directory"], "parallel_processing_plot.png"
        # )
        processing_csv_archive_file = os.path.join(
            self.params_dict["archive_directory"],
            "parallel_processing_score.csv")

        # If MeshScan, XrayCentering or LineScan then info is stored in ISPyB
        if self.params_dict["workflow_type"] in (
                "MeshScan",
                "XrayCentering",
                "LineScan",
        ):
            # Reuse a workflow id left over from an earlier pass (see the
            # XrayCentering branch below which sets self.workflow_info).
            if self.workflow_info is not None:
                self.params_dict["workflow_id"] = self.workflow_info[
                    "workflow_id"]

            (
                workflow_id,
                workflow_mesh_id,
                grid_info_id,
            ) = HWR.beamline.lims.store_workflow(self.params_dict)

            # Keep the ISPyB ids for subsequent workflow updates.
            self.params_dict["workflow_id"] = workflow_id
            self.params_dict["workflow_mesh_id"] = workflow_mesh_id
            self.params_dict["grid_info_id"] = grid_info_id

            # For XrayCentering on a grid, remember the workflow so a later
            # call can attach to the same ISPyB workflow; otherwise reset.
            if self.params_dict[
                    "workflow_type"] == "XrayCentering" and self.grid:
                self.workflow_info = {
                    "workflow_id":
                    self.params_dict["workflow_id"],
                    "process_root_directory":
                    self.params_dict["process_root_directory"],
                    "archive_root_directory":
                    self.params_dict["archive_root_directory"],
                }
            else:
                self.workflow_info = None

            HWR.beamline.collect.update_lims_with_workflow(
                workflow_id,
                os.path.join(self.params_dict["archive_directory"],
                             "snapshot.png"),
            )

            HWR.beamline.lims.store_workflow_step(self.params_dict)
            # Only the single best position's image is stored here.
            if len(best_positions) > 0:
                HWR.beamline.collect.store_image_in_lims_by_frame_num(
                    best_positions[0]["index"])
            log.info("Parallel processing: Results saved in ISPyB")

        HWR.beamline.lims.set_image_quality_indicators_plot(
            HWR.beamline.collect.collection_id,
            self.params_dict["cartography_path"],
            self.params_dict["csv_file_path"],
        )

        fig, ax = plt.subplots(nrows=1, ncols=1)
        if self.grid:
            # Scale the figure so its aspect ratio matches the physical grid
            # (steps * per-step offset in each direction), keeping the larger
            # side at the current default figure size.
            current_max = max(fig.get_size_inches())
            grid_width = self.params_dict["steps_x"] * self.params_dict[
                "xOffset"]
            grid_height = self.params_dict["steps_y"] * self.params_dict[
                "yOffset"]

            if grid_width > grid_height:
                fig.set_size_inches(current_max,
                                    current_max * grid_height / grid_width)
            else:
                fig.set_size_inches(current_max * grid_width / grid_height,
                                    current_max)

            # Transposed so score[col][row] maps onto x=cols, y=rows.
            im = ax.imshow(
                np.transpose(self.results_aligned["score"]),
                interpolation="none",
                aspect="auto",
                extent=[
                    0,
                    self.results_aligned["score"].shape[0],
                    0,
                    self.results_aligned["score"].shape[1],
                ],
            )
            im.set_cmap("hot")

            try:
                if not os.path.exists(
                        os.path.dirname(processing_grid_overlay_file)):
                    os.makedirs(os.path.dirname(processing_grid_overlay_file))

                # Separate raw heat-map png used as a camera-view overlay.
                plt.imsave(
                    processing_grid_overlay_file,
                    np.transpose(self.results_aligned["score"]),
                    format="png",
                    cmap="hot",
                )
                self.grid.set_overlay_pixmap(processing_grid_overlay_file)
                log.info("Parallel processing: Grid overlay figure saved %s" %
                         processing_grid_overlay_file)
            except Exception:
                log.exception(
                    "Parallel processing: Could not save grid overlay figure %s"
                    % processing_grid_overlay_file)

            # Cross-hair marking the best position plus a colorbar.
            if len(best_positions) > 0:
                plt.axvline(x=best_positions[0]["col"], linewidth=0.5)
                plt.axhline(y=best_positions[0]["row"], linewidth=0.5)

                divider = make_axes_locatable(ax)
                cax = divider.append_axes("right", size=0.1, pad=0.05)
                cax.tick_params(axis="x", labelsize=8)
                cax.tick_params(axis="y", labelsize=8)
                plt.colorbar(im, cax=cax)
        else:
            # max_resolution = self.params_dict["resolution"]
            # min_resolution = self.results_aligned["spots_resolution"].max()

            # TODO plot results based on the result_name_list
            # Line scan: plot score and spot count normalized to [0, 1] so
            # they share one axis with the resolution curve. Guard against
            # division by zero when a channel is all zeros.
            max_score = self.results_aligned["score"].max()
            if max_score == 0:
                max_score = 1
            max_spots_num = self.results_aligned["spots_num"].max()
            if max_spots_num == 0:
                max_spots_num = 1

            plt.plot(self.results_aligned["score"] / max_score,
                     ".",
                     label="Score",
                     c="r")
            plt.plot(
                self.results_aligned["spots_num"] / max_spots_num,
                ".",
                label="Number of spots",
                c="b",
            )
            plt.plot(self.results_aligned["spots_resolution"],
                     ".",
                     label="Resolution",
                     c="y")

            ax.legend(
                loc="lower center",
                fancybox=True,
                numpoints=1,
                borderaxespad=0.0,
                bbox_to_anchor=(0.5, -0.13),
                ncol=3,
                fontsize=8,
            )
            ax.set_ylim(-0.01, 1.1)
            ax.set_xlim(0, self.params_dict["images_num"])

            # Relabel the y axis in resolution units (1/value); the first
            # tick (value 0) is labelled "inf".
            positions = np.linspace(
                0, self.results_aligned["spots_resolution"].max(), 5)
            labels = ["inf"]
            for item in positions[1:]:
                labels.append("%.2f" % (1.0 / item))
            ax.set_yticks(positions)
            ax.set_yticklabels(labels)

            # new_labels = numpy.linspace(min_resolution, max_resolution / 1.2, len(ax.get_yticklabels()))
            # new_labels = numpy.round(new_labels, 1)
            # ax.set_yticklabels(new_labels)
            ax.set_ylabel("Resolution")

            # Secondary y axis labelled with absolute spot counts.
            ay1 = ax.twinx()
            new_labels = np.linspace(
                0,
                self.results_aligned["spots_num"].max(),
                len(ay1.get_yticklabels()),
                dtype=np.int16,
            )
            ay1.set_yticklabels(new_labels)
            ay1.set_ylabel("Number of spots")

        # ---------------------------------------------------------------------
        ax.tick_params(axis="x", labelsize=8)
        ax.tick_params(axis="y", labelsize=8)
        ax.set_title(self.params_dict["title"], fontsize=8)

        ax.grid(True)
        ax.spines["left"].set_position(("outward", 10))
        ax.spines["bottom"].set_position(("outward", 10))

        # ---------------------------------------------------------------------
        # Stores plot in the processing directory
        try:
            if not os.path.exists(
                    os.path.dirname(self.params_dict["cartography_path"])):
                os.makedirs(
                    os.path.dirname(self.params_dict["cartography_path"]))
            fig.savefig(self.params_dict["cartography_path"],
                        dpi=100,
                        bbox_inches="tight")
            log.info("Parallel processing: Plot saved in %s" %
                     self.params_dict["cartography_path"])
        except Exception:
            log.exception("Parallel processing: Could not save plot in %s" %
                          self.params_dict["cartography_path"])

        # ---------------------------------------------------------------------
        # Stores plot for ISPyB
        # NOTE(review): this block is identical to the one above and re-saves
        # to the same cartography_path — presumably it was meant to write to
        # the archive path (commented out near the top); confirm and fix.
        try:
            if not os.path.exists(
                    os.path.dirname(self.params_dict["cartography_path"])):
                os.makedirs(
                    os.path.dirname(self.params_dict["cartography_path"]))
            fig.savefig(self.params_dict["cartography_path"],
                        dpi=100,
                        bbox_inches="tight")
            log.info("Parallel processing: Plot for ISPyB saved in %s" %
                     self.params_dict["cartography_path"])
        except Exception:
            log.exception(
                "Parallel processing: Could not save plot for ISPyB %s" %
                self.params_dict["cartography_path"])

        plt.close(fig)

        # ---------------------------------------------------------------------
        # Generates html and json files
        try:
            SimpleHTML.generate_parallel_processing_report(
                self.results_aligned, self.params_dict)
            log.info("Parallel processing: Html report saved in %s" %
                     self.params_dict["html_file_path"])
            log.info("Parallel processing: Json report saved in %s" %
                     self.params_dict["json_file_path"])
        except Exception:
            log.exception(
                "Parallel processing: Could not save results html %s" %
                self.params_dict["html_file_path"])
            log.exception(
                "Parallel processing: Could not save json results in %s" %
                self.params_dict["json_file_path"])

        # ---------------------------------------------------------------------
        # Writes results in the csv file: one header line with acquisition
        # parameters followed by one line per image
        # (index, score, spots_num, spots_resolution).
        # NOTE(review): run_number appears twice in the header format —
        # the second field presumably was meant to be a different parameter;
        # confirm against the csv consumer. Also the file handle is not
        # closed if a write raises (no with-statement / finally).
        try:
            processing_csv_file = open(processing_csv_archive_file, "w")
            processing_csv_file.write(
                "%s,%d,%d,%d,%d,%d,%s,%d,%d,%f,%f,%s\n" % (
                    self.params_dict["template"],
                    self.params_dict["first_image_num"],
                    self.params_dict["images_num"],
                    self.params_dict["run_number"],
                    self.params_dict["run_number"],
                    self.params_dict["lines_num"],
                    str(self.params_dict["reversing_rotation"]),
                    HWR.beamline.detector.get_pixel_min(),
                    HWR.beamline.detector.get_pixel_max(),
                    self.beamstop_hwobj.get_size(),
                    self.beamstop_hwobj.get_distance(),
                    self.beamstop_hwobj.get_direction(),
                ))
            for index in range(self.params_dict["images_num"]):
                processing_csv_file.write("%d,%f,%d,%f\n" % (
                    index,
                    self.results_raw["score"][index],
                    self.results_raw["spots_num"][index],
                    self.results_raw["spots_resolution"][index],
                ))
            log.info("Parallel processing: Raw data stored in %s" %
                     processing_csv_archive_file)
            processing_csv_file.close()
        except Exception:
            log.error("Parallel processing: Unable to store raw data in %s" %
                      processing_csv_archive_file)