Example #1
    def workflow_selected(self):
        # necessary as this comes in as a QString object
        name = ConvertUtils.text_type(self._workflow_cbox.currentText())
        # if reset or name != self._previous_workflow:
        xx0 = self._workflow_cbox
        xx0.setCurrentIndex(xx0.findText(name))
        self.init_models()
        self._data_path_widget.update_data_model(self._path_template)

        parameters = api.gphl_workflow.get_available_workflows()[name]
        strategy_type = parameters.get("strategy_type")
        if strategy_type == "transcal":
            # NB Once we do not have to set unique prefixes, this should be readOnly
            self._data_path_widget.data_path_layout.prefix_ledit.setReadOnly(False)
            self._gphl_acq_widget.hide()
        elif strategy_type == "diffractcal":
            # TODO update this
            self._data_path_widget.data_path_layout.prefix_ledit.setReadOnly(True)
            self._gphl_diffractcal_widget.populate_widget()
            self._gphl_acq_widget.show()
            self._gphl_diffractcal_widget.show()
            self._gphl_acq_param_widget.hide()
        else:
            # acquisition type strategy
            self._data_path_widget.data_path_layout.prefix_ledit.setReadOnly(True)
            self._gphl_acq_param_widget.populate_widget()
            self._gphl_acq_widget.show()
            self._gphl_diffractcal_widget.hide()
            self._gphl_acq_param_widget.show()

        prefix = parameters.get("prefix")
        if prefix is not None:
            self.current_prefix = prefix
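A recurring idiom across these examples is ConvertUtils.text_type(...), used to coerce Qt return values (which may arrive as QString objects under Python 2 / PyQt4) into plain text. A minimal, self-contained sketch of what such a helper could look like, inferred from how it is called here rather than taken from the project's actual implementation:

import sys

if sys.version_info[0] >= 3:
    # Python 3: textual values are already str
    text_type = str
else:
    # Python 2: coerce QString/str values to unicode
    text_type = unicode  # noqa: F821 (Python 2 builtin)

def combo_text(combo_box):
    """Return a Qt combo box's current text as a plain text object."""
    return text_type(combo_box.currentText())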
Example #2
 def _CollectionDone_to_java(self, collectionDone):
     jvm = self._gateway.jvm
     proposalId = jvm.java.util.UUID.fromString(
         ConvertUtils.text_type(collectionDone.proposalId)
     )
     return jvm.astra.messagebus.messages.information.CollectionDoneImpl(
         proposalId, collectionDone.imageRoot, collectionDone.status
     )
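The message converters here pass uuid-valued ids across the py4j gateway as strings and let the Java side rebuild them with java.util.UUID.fromString. The Python half of that round-trip is plain stdlib uuid; a tiny stand-alone check (illustrative only) that the text form parses back to the same value:

import uuid

proposal_id = uuid.uuid4()        # stands in for collectionDone.proposalId
text_form = str(proposal_id)      # what text_type(...) yields for a uuid
assert uuid.UUID(text_form) == proposal_id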
Example #4
 def open_file_dialog(self):
     start_path = os.path.dirname(
         ConvertUtils.text_type(self.filepath.text()))
     if not os.path.exists(start_path):
         start_path = ""
     path = QtImport.QFileDialog(self).getOpenFileName(directory=start_path)
     if not path.isNull():
         self.filepath.setText(path)
    def _PhasingWavelength_to_java(self, phasingWavelength):
        jvm = self._gateway.jvm

        if phasingWavelength is None:
            return None

        javaUuid = self._gateway.jvm.java.util.UUID.fromString(
            ConvertUtils.text_type(phasingWavelength.id_))
        return jvm.astra.messagebus.messages.information.PhasingWavelengthImpl(
            javaUuid, float(phasingWavelength.wavelength),
            phasingWavelength.role)
    def _GoniostatTranslation_to_java(self, goniostatTranslation):
        jvm = self._gateway.jvm

        if goniostatTranslation is None:
            return None

        gts = goniostatTranslation
        javaUuid = jvm.java.util.UUID.fromString(
            ConvertUtils.text_type(gts.id_))
        javaRotationId = jvm.java.util.UUID.fromString(
            ConvertUtils.text_type(gts.requestedRotationId))
        axisSettings = dict(
            ((x, float(y)) for x, y in gts.axisSettings.items()))
        newRotation = gts.newRotation
        if newRotation:
            javaNewRotation = self._GoniostatRotation_to_java(newRotation)
            return jvm.astra.messagebus.messages.instrumentation.GoniostatTranslationImpl(
                axisSettings, javaUuid, javaRotationId, javaNewRotation)
        else:
            return jvm.astra.messagebus.messages.instrumentation.GoniostatTranslationImpl(
                axisSettings, javaUuid, javaRotationId)
    def _PhasingWavelength_to_java(self, phasingWavelength):
        jvm = self._gateway.jvm

        if phasingWavelength is None:
            return None

        javaUuid = self._gateway.jvm.java.util.UUID.fromString(
            ConvertUtils.text_type(phasingWavelength.id_)
        )
        return jvm.astra.messagebus.messages.information.PhasingWavelengthImpl(
            javaUuid, float(phasingWavelength.wavelength), phasingWavelength.role
        )
    def _BeamstopSetting_to_java(self, beamStopSetting):
        jvm = self._gateway.jvm

        if beamStopSetting is None:
            return None

        javaUuid = jvm.java.util.UUID.fromString(
            ConvertUtils.text_type(beamStopSetting.id_))
        axisSettings = dict(
            ((x, float(y)) for x, y in beamStopSetting.axisSettings.items()))
        return jvm.astra.messagebus.messages.instrumentation.BeamstopSettingImpl(
            axisSettings, javaUuid)
    def _GoniostatTranslation_to_java(self, goniostatTranslation):
        jvm = self._gateway.jvm

        if goniostatTranslation is None:
            return None

        gts = goniostatTranslation
        javaUuid = jvm.java.util.UUID.fromString(ConvertUtils.text_type(gts.id_))
        javaRotationId = jvm.java.util.UUID.fromString(
            ConvertUtils.text_type(gts.requestedRotationId)
        )
        axisSettings = dict(((x, float(y)) for x, y in gts.axisSettings.items()))
        newRotation = gts.newRotation
        if newRotation:
            javaNewRotation = self._GoniostatRotation_to_java(newRotation)
            return jvm.astra.messagebus.messages.instrumentation.GoniostatTranslationImpl(
                axisSettings, javaUuid, javaRotationId, javaNewRotation
            )
        else:
            return jvm.astra.messagebus.messages.instrumentation.GoniostatTranslationImpl(
                axisSettings, javaUuid, javaRotationId
            )
    def _BeamstopSetting_to_java(self, beamStopSetting):
        jvm = self._gateway.jvm

        if beamStopSetting is None:
            return None

        javaUuid = jvm.java.util.UUID.fromString(
            ConvertUtils.text_type(beamStopSetting.id_)
        )
        axisSettings = dict(
            ((x, float(y)) for x, y in beamStopSetting.axisSettings.items())
        )
        return jvm.astra.messagebus.messages.instrumentation.BeamstopSettingImpl(
            axisSettings, javaUuid
        )
    def _GoniostatRotation_to_java(self, goniostatRotation):
        jvm = self._gateway.jvm

        if goniostatRotation is None:
            return None

        grs = goniostatRotation
        javaUuid = jvm.java.util.UUID.fromString(ConvertUtils.text_type(grs.id_))
        axisSettings = dict(((x, float(y)) for x, y in grs.axisSettings.items()))
        # NBNB The final None is necessary because there is no non-deprecated
        # constructor that takes two UUIDs. Eventually the deprecated
        # constructor will disappear and we can remove the None
        return jvm.astra.messagebus.messages.instrumentation.GoniostatRotationImpl(
            axisSettings, javaUuid, None
        )
Example #12
    def _GoniostatRotation_to_java(self, goniostatRotation):
        jvm = self._gateway.jvm

        if goniostatRotation is None:
            return None

        grs = goniostatRotation
        javaUuid = jvm.java.util.UUID.fromString(ConvertUtils.text_type(grs.id_))
        axisSettings = dict(((x, float(y)) for x, y in grs.axisSettings.items()))
        # NBNB The final None is necessary because there is no non-deprecated
        # constructor that takes two UUIDs. Eventually the deprecated
        # constructor will disappear and we can remove the None
        return jvm.astra.messagebus.messages.instrumentation.GoniostatRotationImpl(
            axisSettings, javaUuid, None
        )
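All of the *_to_java converters above follow the same shape: return None for None input, rebuild the id as a java.util.UUID through the py4j gateway, coerce numeric axis settings to float, and call the matching *Impl constructor. A hedged, stand-alone rendering of that pattern (the function name is illustrative; the attributes id_ and axisSettings and the BeamstopSettingImpl class are taken from the examples above):

def setting_to_java(gateway, setting):
    """Convert a Python-side setting with id_ and axisSettings to its Java Impl."""
    if setting is None:
        return None
    jvm = gateway.jvm
    java_uuid = jvm.java.util.UUID.fromString(str(setting.id_))
    axis_settings = dict((name, float(value)) for name, value in setting.axisSettings.items())
    return jvm.astra.messagebus.messages.instrumentation.BeamstopSettingImpl(
        axis_settings, java_uuid
    )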
Example #13
 def populateColumn(self, colNum, values, colours=None):
     """Fill values into column, extending if necessary"""
     if len(values) > self.rowCount():
         self.setRowCount(len(values))
     for rowNum, text in enumerate(values):
         wdg = QtImport.QLineEdit(self)
         wdg.setFont(QtImport.QFont("Courier"))
         wdg.setReadOnly(True)
         wdg.setText(ConvertUtils.text_type(text))
         if colours:
             colour = colours[rowNum]
             if colour:
                 Colors.set_widget_color(wdg, getattr(Colors, colour),
                                         QtImport.QPalette.Base)
                 # wdg.setBackground(getattr(QtImport.QColor, colour))
         self.setCellWidget(rowNum, colNum, wdg)
 def _PriorInformation_to_java(self, priorInformation):
     jvm = self._gateway.jvm
     buildr = jvm.astra.messagebus.messages.information.PriorInformationImpl.Builder(
         jvm.java.util.UUID.fromString(
             ConvertUtils.text_type(priorInformation.sampleId)))
     xx0 = priorInformation.sampleName
     if xx0:
         buildr = buildr.sampleName(xx0)
     xx0 = priorInformation.rootDirectory
     if xx0:
         buildr = buildr.rootDirectory(xx0)
     # images not implemented yet - awaiting uses
     # indexingResults not implemented yet - awaiting uses
     buildr = buildr.userProvidedInfo(
         self._UserProvidedInfo_to_java(priorInformation.userProvidedInfo))
     #
     return buildr.build()
 def _PriorInformation_to_java(self, priorInformation):
     jvm = self._gateway.jvm
     buildr = jvm.astra.messagebus.messages.information.PriorInformationImpl.Builder(
         jvm.java.util.UUID.fromString(
             ConvertUtils.text_type(priorInformation.sampleId)
         )
     )
     xx0 = priorInformation.sampleName
     if xx0:
         buildr = buildr.sampleName(xx0)
     xx0 = priorInformation.rootDirectory
     if xx0:
         buildr = buildr.rootDirectory(xx0)
     # images not implemented yet - awaiting uses
     # indexingResults not implemented yet - awaiting uses
     buildr = buildr.userProvidedInfo(
         self._UserProvidedInfo_to_java(priorInformation.userProvidedInfo)
     )
     #
     return buildr.build()
    def _BcsDetectorSetting_to_java(self, bcsDetectorSetting):
        jvm = self._gateway.jvm

        if bcsDetectorSetting is None:
            return None

        orgxy = bcsDetectorSetting.orgxy
        # Needed (temporarily?) because the Java side expects a primitive
        # array, not a list
        orgxy_array = self._gateway.new_array(jvm.double, 2)
        orgxy_array[0] = orgxy[0]
        orgxy_array[1] = orgxy[1]
        axisSettings = dict(
            ((x, float(y))
             for x, y in bcsDetectorSetting.axisSettings.items()))
        javaUuid = jvm.java.util.UUID.fromString(
            ConvertUtils.text_type(bcsDetectorSetting.id_))
        return jvm.astra.messagebus.messages.instrumentation.BcsDetectorSettingImpl(
            float(bcsDetectorSetting.resolution), orgxy_array, axisSettings,
            javaUuid)
    def _BcsDetectorSetting_to_java(self, bcsDetectorSetting):
        jvm = self._gateway.jvm

        if bcsDetectorSetting is None:
            return None

        orgxy = bcsDetectorSetting.orgxy
        # Needed (temporarily?) because the Java side expects a primitive
        # array, not a list
        orgxy_array = self._gateway.new_array(jvm.double, 2)
        orgxy_array[0] = orgxy[0]
        orgxy_array[1] = orgxy[1]
        axisSettings = dict(
            ((x, float(y)) for x, y in bcsDetectorSetting.axisSettings.items())
        )
        javaUuid = jvm.java.util.UUID.fromString(
            ConvertUtils.text_type(bcsDetectorSetting.id_)
        )
        return jvm.astra.messagebus.messages.instrumentation.BcsDetectorSettingImpl(
            float(bcsDetectorSetting.resolution), orgxy_array, axisSettings, javaUuid
        )
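The orgxy handling in _BcsDetectorSetting_to_java exists because the Java side expects a primitive double[] rather than a list, so the array is built with py4j's gateway.new_array. A small generic helper capturing that idiom (the helper name is illustrative):

def to_java_double_array(gateway, values):
    """Copy a Python sequence of numbers into a Java double[] via py4j."""
    jarray = gateway.new_array(gateway.jvm.double, len(values))
    for index, value in enumerate(values):
        jarray[index] = float(value)
    return jarray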
Example #18
    def workflow_selected(self):
        # necessary as this comes in as a QString object
        name = ConvertUtils.text_type(self._workflow_cbox.currentText())
        # if reset or name != self._previous_workflow:
        xx0 = self._workflow_cbox
        xx0.setCurrentIndex(xx0.findText(name))
        self.init_models()
        self._data_path_widget.update_data_model(self._path_template)

        parameters = HWR.beamline.gphl_workflow.get_available_workflows()[name]
        strategy_type = parameters.get("strategy_type")
        if strategy_type == "transcal":
            # NB Once we do not have to set unique prefixes, this should be readOnly
            self._data_path_widget.data_path_layout.prefix_ledit.setReadOnly(
                False)
            self._gphl_acq_widget.hide()
        elif strategy_type == "diffractcal":
            # TODO update this
            self._data_path_widget.data_path_layout.prefix_ledit.setReadOnly(
                True)
            self._gphl_diffractcal_widget.populate_widget()
            self._gphl_acq_widget.show()
            self._gphl_diffractcal_widget.show()
            self._gphl_acq_param_widget.hide()
        else:
            # acquisition type strategy
            self._data_path_widget.data_path_layout.prefix_ledit.setReadOnly(
                True)
            self._gphl_acq_param_widget.populate_widget()
            self._gphl_acq_widget.show()
            self._gphl_diffractcal_widget.hide()
            self._gphl_acq_param_widget.show()

        prefix = parameters.get("prefix")
        if prefix is not None:
            self.current_prefix = prefix
Example #19
 def get_value(self):
     return ConvertUtils.text_type(self.text())
Example #20
 def get_value(self):
     val = int(self.value())
     return ConvertUtils.text_type(val)
Example #21
    def _create_task(self, sample, shape):
        tasks = []

        path_template = self._create_path_template(sample, self._path_template)
        path_template.num_files = 0
        path_template.compression = False

        workflow_hwobj = HWR.beamline.gphl_workflow
        if workflow_hwobj.get_state() == States.OFF:
            # The connection will be set up now - connect aboutToQuit to shutdown
            QtImport.QApplication.instance().aboutToQuit.connect(
                workflow_hwobj.shutdown)

            tree_brick = self._tree_brick
            if tree_brick:
                tree_brick.dc_tree_widget.confirm_dialog.continueClickedSignal.connect(
                    self.continue_button_click)

        wf = queue_model_objects.GphlWorkflow()
        wf_type = ConvertUtils.text_type(self._workflow_cbox.currentText())
        wf.set_type(wf_type)

        if self.current_prefix:
            path_template.base_prefix = self.current_prefix
        wf.path_template = path_template
        wf.set_name(wf.path_template.get_prefix())
        wf.set_number(wf.path_template.run_number)

        wf_parameters = workflow_hwobj.get_available_workflows()[wf_type]
        strategy_type = wf_parameters.get("strategy_type")
        wf.set_interleave_order(wf_parameters.get("interleaveOrder", ""))
        if strategy_type.startswith("transcal"):
            pass

        elif strategy_type.startswith("diffractcal"):
            ss0 = self._gphl_diffractcal_widget.get_parameter_value(
                "test_crystal")
            crystal_data = self._gphl_diffractcal_widget.test_crystals.get(ss0)
            wf.set_space_group(crystal_data.space_group)
            wf.set_cell_parameters(
                tuple(
                    getattr(crystal_data, tag)
                    for tag in ("a", "b", "c", "alpha", "beta", "gamma")))
            tag = self._gphl_acq_param_widget.get_parameter_value(
                "dose_budget")
            wf.set_dose_budget(
                HWR.beamline.gphl_workflow.dose_budgets.get(tag))
            # The entire strategy runs as a 'characterisation'
            wf.set_characterisation_budget_fraction(1.0)
        else:
            # Could be native_..., phasing_..., etc.

            wf.set_space_group(
                self._gphl_acq_param_widget.get_parameter_value("space_group"))
            wf.set_characterisation_strategy(
                self._gphl_acq_param_widget.get_parameter_value(
                    "characterisation_strategy"))
            tag = self._gphl_acq_param_widget.get_parameter_value(
                "crystal_system")
            crystal_system, point_group = None, None
            if tag:
                data = self._gphl_acq_param_widget._CRYSTAL_SYSTEM_DATA[tag]
                crystal_system = data.crystal_system
                point_groups = data.point_groups
                if len(point_groups) == 1 or point_groups[0] == "32":
                    # '32' is a special case; '312' and '321' are also returned as '32'
                    point_group = point_groups[0]
            wf.set_point_group(point_group)
            wf.set_crystal_system(crystal_system)
            wf.set_beam_energies(wf_parameters["beam_energies"])
            tag = self._gphl_acq_param_widget.get_parameter_value(
                "dose_budget")
            wf.set_dose_budget(
                HWR.beamline.gphl_workflow.dose_budgets.get(tag))
            val = self._gphl_acq_param_widget.get_parameter_value(
                "relative_rad_sensitivity")
            wf.set_relative_rad_sensitivity(val)
            wf.set_characterisation_budget_fraction(
                HWR.beamline.gphl_workflow.getProperty(
                    "characterisation_budget_percent", 5.0) / 100.0)

        tasks.append(wf)

        return tasks
    def _get_simcal_input(self, data_collect_parameters, crystal_data):
        """Get ordered dict with simcal input from available data"""

        # Set up and add crystal data
        result = OrderedDict()
        setup_data = result["setup_list"] = crystal_data

        # update with instrument data
        fp0 = api.gphl_workflow.file_paths.get("instrumentation_file")
        instrument_input = f90nml.read(fp0)

        instrument_data = instrument_input["sdcp_instrument_list"]
        segments = instrument_input["segment_list"]
        if isinstance(segments, dict):
            segment_count = 1
        else:
            segment_count = len(segments)

        sweep_count = len(data_collect_parameters["oscillation_sequence"])

        # Move beamstop settings to top level
        ll0 = instrument_data.get("beamstop_param_names")
        ll1 = instrument_data.get("beamstop_param_vals")
        if ll0 and ll1:
            for tag, val in zip(ll0, ll1):
                instrument_data[tag.lower()] = val

        # Setting parameters in order (may not be necessary, but ...)
        # Missing: *mu*
        remap = {
            "beam": "nominal_beam_dir",
            "det_coord_def": "det_org_dist",
            "cone_s_height": "cone_height",
        }
        tags = (
            "lambda_sd",
            "beam",
            "beam_sd_deg",
            "pol_plane_n",
            "pol_frac",
            "d_sensor",
            "min_zeta",
            "det_name",
            "det_x_axis",
            "det_y_axis",
            "det_qx",
            "det_qy",
            "det_nx",
            "det_ny",
            "det_org_x",
            "det_org_y",
            "det_coord_def",
        )
        for tag in tags:
            val = instrument_data.get(remap.get(tag, tag))
            if val is not None:
                setup_data[tag] = val

        ll0 = instrument_data["gonio_axis_dirs"]
        setup_data["omega_axis"] = ll0[:3]
        setup_data["kappa_axis"] = ll0[3:6]
        setup_data["phi_axis"] = ll0[6:]
        ll0 = instrument_data["gonio_centring_axis_dirs"]
        setup_data["trans_x_axis"] = ll0[:3]
        setup_data["trans_y_axis"] = ll0[3:6]
        setup_data["trans_z_axis"] = ll0[6:]
        tags = (
            "cone_radius",
            "cone_s_height",
            "beam_stop_radius",
            "beam_stop_s_length",
            "beam_stop_s_distance",
        )
        for tag in tags:
            val = instrument_data.get(remap.get(tag, tag))
            if val is not None:
                setup_data[tag] = val

        # Add/overwrite parameters from emulator configuration
        conv = ConvertUtils.convert_string_value
        for key, val in self["simcal_parameters"].getProperties().items():
            setup_data[key] = conv(val)

        setup_data["n_vertices"] = 0
        setup_data["n_triangles"] = 0
        setup_data["n_segments"] = segment_count
        setup_data["n_orients"] = 0
        setup_data["n_sweeps"] = sweep_count

        # Add segments
        result["segment_list"] = segments

        # Adjustments
        val = instrument_data.get("beam")
        if val:
            setup_data["beam"] = val

        # update with diffractcal data
        # TODO check that this works also for updating segment list
        fp0 = api.gphl_workflow.file_paths.get("diffractcal_file")
        if os.path.isfile(fp0):
            diffractcal_data = f90nml.read(fp0)["sdcp_instrument_list"]
            for tag in setup_data.keys():
                val = diffractcal_data.get(tag)
                if val is not None:
                    setup_data[tag] = val
            ll0 = diffractcal_data["gonio_axis_dirs"]
            setup_data["omega_axis"] = ll0[:3]
            setup_data["kappa_axis"] = ll0[3:6]
            setup_data["phi_axis"] = ll0[6:]

        # get resolution limit and detector distance
        detector_distance = data_collect_parameters.get("detector_distance", 0.0)
        if not detector_distance:
            resolution = data_collect_parameters["resolution"]["upper"]
            self.set_resolution(resolution)
            detector_distance = self.get_detector_distance()
        # Add sweeps
        sweeps = []
        for osc in data_collect_parameters["oscillation_sequence"]:
            motors = data_collect_parameters["motors"]
            sweep = OrderedDict()

            sweep["lambda"] = ConvertUtils.H_OVER_E / data_collect_parameters["energy"]
            sweep["res_limit"] = setup_data["res_limit_def"]
            sweep["exposure"] = osc["exposure_time"]
            ll0 = api.gphl_workflow.translation_axis_roles
            sweep["trans_xyz"] = list(motors.get(x) or 0.0 for x in ll0)
            sweep["det_coord"] = detector_distance
            # NBNB hardwired for omega scan TODO
            sweep["axis_no"] = 3
            sweep["omega_deg"] = osc["start"]
            # NB kappa and phi are overwritten from the motors dict, if set there
            sweep["kappa_deg"] = osc["kappaStart"]
            sweep["phi_deg"] = osc["phiStart"]
            sweep["step_deg"] = osc["range"]
            sweep["n_frames"] = osc["number_of_images"]
            sweep["image_no"] = osc["start_image_number"]
            # self.make_image_file_template(data_collect_parameters, suffix='cbf')

            # Extract format statement from template,
            # and convert to fortran format
            template = data_collect_parameters["fileinfo"]["template"]
            ss0 = str(re.search("(%[0-9]+d)", template).group(0))
            template = template.replace(ss0, "?" * int(ss0[1:-1]))
            name_template = os.path.join(
                data_collect_parameters["fileinfo"]["directory"],
                template
                # data_collect_parameters['fileinfo']['template']
            )
            sweep["name_template"] = ConvertUtils.to_ascii(name_template)

            # Overwrite kappa and phi from motors - if set
            val = motors.get("kappa")
            if val is not None:
                sweep["kappa_deg"] = val
            val = motors.get("kappa_phi")
            if val is not None:
                sweep["phi_deg"] = val

            # Skipped: spindle_deg=0.0, two_theta_deg=0.0, mu_air=-1, mu_sensor=-1

            sweeps.append(sweep)

        if sweep_count == 1:
            # NBNB in current code we can have only one sweep here,
            # but it will work for multiple
            result["sweep_list"] = sweep
        else:
            result["sweep_list"] = sweeps
        #
        return result
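_get_simcal_input assembles a Fortran namelist with a setup_list group, a segment_list group and one sweep entry per oscillation; the hook below writes it out with f90nml. A minimal stand-alone illustration of reading, amending and rewriting such a namelist with f90nml (file paths and the extra values are placeholders):

from collections import OrderedDict

import f90nml

def amend_namelist(in_path, out_path, extra_setup):
    """Read a simcal-style namelist, update its setup group and rewrite it."""
    nml = f90nml.read(in_path)               # returns a dict-like Namelist
    setup = nml.setdefault("setup_list", OrderedDict())
    setup.update(extra_setup)                # e.g. {"n_sweeps": 1}
    f90nml.write(nml, out_path, force=True)  # force=True overwrites out_path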
    def data_collection_hook(self):
        """Spawns data emulation using gphl simcal"""

        data_collect_parameters = self.current_dc_parameters

        if not api.gphl_workflow:
            raise ValueError("Emulator requires GPhL workflow installation")
        gphl_connection = api.gphl_connection
        if not gphl_connection:
            raise ValueError("Emulator requires GPhL connection installation")

        # Get program locations
        simcal_executive = gphl_connection.get_executable("simcal")
        # Get environment variables
        envs = {
            "BDG_home": gphl_connection.software_paths["BDG_home"],
            "GPHL_INSTALLATION": gphl_connection.software_paths[
                "GPHL_INSTALLATION"
            ],
        }
        for tag, val in self["environment_variables"].getProperties().items():
            envs[str(tag)] = str(val)

        # get crystal data
        sample_name = self.getProperty("default_sample_name")
        sample = self.sample_changer_hwobj.getLoadedSample()
        if sample:
            ss0 = sample.getName()
            if ss0 and ss0.startswith(self.TEST_SAMPLE_PREFIX):
                sample_name = ss0[len(self.TEST_SAMPLE_PREFIX) :]

        sample_dir = gphl_connection.software_paths.get("gphl_test_samples")
        if not sample_dir:
            raise ValueError("Emulator requires gphl_test_samples dir specified")
        sample_dir = os.path.join(sample_dir, sample_name)
        if not os.path.isdir(sample_dir):
            raise ValueError("Sample data directory %s does not exist" % sample_dir)
        crystal_file = os.path.join(sample_dir, "crystal.nml")
        if not os.path.isfile(crystal_file):
            raise ValueError(
                "Emulator crystal data file %s does not exist" % crystal_file
            )
        # in spite of the simcal_crystal_list name this returns an OrderedDict
        crystal_data = f90nml.read(crystal_file)["simcal_crystal_list"]
        if isinstance(crystal_data, list):
            crystal_data = crystal_data[0]

        input_data = self._get_simcal_input(data_collect_parameters, crystal_data)

        # NB outfile is the echo output of the input file;
        # image files templates are set in the input file
        file_info = data_collect_parameters["fileinfo"]
        if not os.path.exists(file_info["directory"]):
            os.makedirs(file_info["directory"])
        infile = os.path.join(
            file_info["directory"], "simcal_in_%s.nml" % self._counter
        )

        f90nml.write(input_data, infile, force=True)
        outfile = os.path.join(
            file_info["directory"], "simcal_out_%s.nml" % self._counter
        )
        logfile = os.path.join(
            file_info["directory"], "simcal_log_%s.txt" % self._counter
        )
        self._counter += 1
        hklfile = os.path.join(sample_dir, "sample.hkli")
        if not os.path.isfile(hklfile):
            raise ValueError("Emulator hkli file %s does not exist" % hklfile)
        command_list = [
            simcal_executive,
            "--input",
            infile,
            "--output",
            outfile,
            "--hkl",
            hklfile,
        ]

        for tag, val in self["simcal_options"].getProperties().items():
            command_list.extend(ConvertUtils.command_option(tag, val, prefix="--"))
        logging.getLogger("HWR").info("Executing command: %s", command_list)
        logging.getLogger("HWR").info(
            "Executing environment: %s" % sorted(envs.items())
        )

        fp1 = open(logfile, "w")
        fp2 = subprocess.STDOUT
        # resource.setrlimit(resource.RLIMIT_STACK, (-1,-1))

        try:
            running_process = subprocess.Popen(
                command_list, stdout=fp1, stderr=fp2, env=envs
            )
            gphl_connection.collect_emulator_process = running_process
        except BaseException:
            logging.getLogger("HWR").error("Error in spawning workflow application")
            raise
        finally:
            fp1.close()

        # This does waiting, so we want to collect the result afterwards
        super(CollectEmulator, self).data_collection_hook()

        logging.getLogger("HWR").info("Waiting for simcal collection emulation.")
        # NBNB TODO put in time-out, somehow
        return_code = running_process.wait()
        process = gphl_connection.collect_emulator_process
        gphl_connection.collect_emulator_process = None
        if process == 'ABORTED':
            logging.getLogger("HWR").info("Simcal collection emulation aborted")
        elif return_code:
            raise RuntimeError(
                "simcal process terminated with return code %s" % return_code
            )
        else:
            logging.getLogger("HWR").info("Simcal collection emulation successful")

        return
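The hook above builds a command list, puts the required variables into the process environment and spawns simcal via subprocess, routing stdout and stderr to a log file before waiting for the exit code. A compact, generic spawn-and-wait helper in the same spirit (names are placeholders; unlike the hook, which builds its environment from scratch, this one extends the parent environment):

import os
import subprocess

def run_logged(command_list, logfile, extra_env=None):
    """Run a command with stdout/stderr captured in logfile; return its exit code."""
    env = os.environ.copy()
    env.update(extra_env or {})
    with open(logfile, "w") as log:
        process = subprocess.Popen(
            command_list, stdout=log, stderr=subprocess.STDOUT, env=env
        )
        return process.wait()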
Example #24
    def data_collection_hook(self):
        """Spawns data emulation using gphl simcal"""

        data_collect_parameters = self.current_dc_parameters

        if not HWR.beamline.gphl_workflow:
            raise ValueError("Emulator requires GPhL workflow installation")
        gphl_connection = HWR.beamline.gphl_connection
        if not gphl_connection:
            raise ValueError("Emulator requires GPhL connection installation")

        # Get program locations
        simcal_executive = gphl_connection.get_executable("simcal")
        # Get environment variables
        envs = {
            "BDG_home":
            gphl_connection.software_paths["BDG_home"],
            "GPHL_INSTALLATION":
            gphl_connection.software_paths["GPHL_INSTALLATION"],
        }
        text_type = ConvertUtils.text_type
        for tag, val in self["environment_variables"].getProperties().items():
            envs[text_type(tag)] = text_type(val)

        # get crystal data
        sample_name = self.getProperty("default_sample_name")
        sample = HWR.beamline.sample_changer.getLoadedSample()
        if sample:
            ss0 = sample.getName()
            if ss0 and ss0.startswith(self.TEST_SAMPLE_PREFIX):
                sample_name = ss0[len(self.TEST_SAMPLE_PREFIX):]

        sample_dir = gphl_connection.software_paths.get("gphl_test_samples")
        if not sample_dir:
            raise ValueError(
                "Emulator requires gphl_test_samples dir specified")
        sample_dir = os.path.join(sample_dir, sample_name)
        if not os.path.isdir(sample_dir):
            raise ValueError("Sample data directory %s does not exist" %
                             sample_dir)
        crystal_file = os.path.join(sample_dir, "crystal.nml")
        if not os.path.isfile(crystal_file):
            raise ValueError("Emulator crystal data file %s does not exist" %
                             crystal_file)
        # in spite of the simcal_crystal_list name this returns an OrderedDict
        crystal_data = f90nml.read(crystal_file)["simcal_crystal_list"]
        if isinstance(crystal_data, list):
            crystal_data = crystal_data[0]

        input_data = self._get_simcal_input(data_collect_parameters,
                                            crystal_data)

        # NB outfile is the echo output of the input file;
        # image files templates are set in the input file
        file_info = data_collect_parameters["fileinfo"]
        if not os.path.exists(file_info["directory"]):
            os.makedirs(file_info["directory"])
        infile = os.path.join(file_info["directory"],
                              "simcal_in_%s.nml" % self._counter)

        f90nml.write(input_data, infile, force=True)
        outfile = os.path.join(file_info["directory"],
                               "simcal_out_%s.nml" % self._counter)
        logfile = os.path.join(file_info["directory"],
                               "simcal_log_%s.txt" % self._counter)
        self._counter += 1
        hklfile = os.path.join(sample_dir, "sample.hkli")
        if not os.path.isfile(hklfile):
            raise ValueError("Emulator hkli file %s does not exist" % hklfile)
        command_list = [
            simcal_executive,
            "--input",
            infile,
            "--output",
            outfile,
            "--hkl",
            hklfile,
        ]

        for tag, val in self["simcal_options"].getProperties().items():
            command_list.extend(
                ConvertUtils.command_option(tag, val, prefix="--"))
        logging.getLogger("HWR").info("Executing command: %s", command_list)
        logging.getLogger("HWR").info("Executing environment: %s" %
                                      sorted(envs.items()))

        fp1 = open(logfile, "w")
        fp2 = subprocess.STDOUT
        # resource.setrlimit(resource.RLIMIT_STACK, (-1,-1))

        try:
            running_process = subprocess.Popen(command_list,
                                               stdout=fp1,
                                               stderr=fp2,
                                               env=envs)
            gphl_connection.collect_emulator_process = running_process
        except BaseException:
            logging.getLogger("HWR").error(
                "Error in spawning workflow application")
            raise
        finally:
            fp1.close()

        # This does waiting, so we want to collect the result afterwards
        super(CollectEmulator, self).data_collection_hook()

        logging.getLogger("HWR").info(
            "Waiting for simcal collection emulation.")
        # NBNB TODO put in time-out, somehow
        return_code = running_process.wait()
        process = gphl_connection.collect_emulator_process
        gphl_connection.collect_emulator_process = None
        if process == "ABORTED":
            logging.getLogger("HWR").info(
                "Simcal collection emulation aborted")
        elif return_code:
            raise RuntimeError(
                "simcal process terminated with return code %s" % return_code)
        else:
            logging.getLogger("HWR").info(
                "Simcal collection emulation successful")

        return
    def start_workflow(self, workflow_queue, workflow_model_obj):

        # NBNB All command line option values are put in quotes (repr) when
        # the workflow is invoked remotely through ssh.

        self.workflow_queue = workflow_queue

        if self.get_state() != States.OFF:
            # NB, for now workflow is started as the connection is made,
            # so we are never in state 'ON'/STANDBY
            raise RuntimeError(
                "Workflow is already running, cannot be started")

        self._workflow_name = workflow_model_obj.get_type()
        params = workflow_model_obj.get_workflow_parameters()

        in_shell = self.hasObject("ssh_options")
        if in_shell:
            dd0 = self["ssh_options"].getProperties().copy()
            #
            host = dd0.pop("Host")
            command_list = ["ssh"]
            if "ConfigFile" in dd0:
                command_list.extend(("-F", dd0.pop("ConfigFile")))
            for tag, val in sorted(dd0.items()):
                command_list.extend(("-o", "%s=%s" % (tag, val)))
                # command_list.extend(('-o', tag, val))
            command_list.append(host)
        else:
            command_list = []
        command_list.append(self.software_paths["java_binary"])

        # # HACK - debug options REMOVE!
        # import socket
        # sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        # sock.connect(("8.8.8.8", 80))
        # ss0 = "-agentlib:jdwp=transport=dt_socket,address=%s:8050,server=y,suspend=y"
        # command_list.append(ss0 % sock.getsockname()[0])

        for tag, val in sorted(
                params.get("invocation_properties", {}).items()):
            command_list.extend(
                ConvertUtils.java_property(tag, val, quote_value=in_shell))

        # We must get hold of the options here, as we need wdir for a property
        workflow_options = dict(params.get("options", {}))
        calibration_name = workflow_options.get("calibration")
        if calibration_name:
            # Expand calibration base name - to simplify identification.
            workflow_options["calibration"] = "%s_%s" % (
                calibration_name,
                workflow_model_obj.get_name(),
            )
        elif not workflow_options.get("strategy"):
            workflow_options["strategy"] = (
                workflow_model_obj.get_characterisation_strategy()
            )
        path_template = workflow_model_obj.get_path_template()
        if "prefix" in workflow_options:
            workflow_options["prefix"] = path_template.base_prefix
        workflow_options["wdir"] = os.path.join(
            path_template.process_directory, self.getProperty("gphl_subdir"))
        # Hardcoded - location for log output
        command_list.extend(
            ConvertUtils.java_property("co.gphl.wf.wdir",
                                       workflow_options["wdir"],
                                       quote_value=in_shell))

        ll0 = ConvertUtils.command_option(
            "cp",
            self.software_paths["gphl_java_classpath"],
            quote_value=in_shell)
        command_list.extend(ll0)

        command_list.append(params["application"])

        for keyword, value in params.get("properties", {}).items():
            command_list.extend(
                ConvertUtils.java_property(keyword,
                                           value,
                                           quote_value=in_shell))
        for keyword, value in self.java_properties.items():
            command_list.extend(
                ConvertUtils.java_property(keyword,
                                           value,
                                           quote_value=in_shell))

        for keyword, value in workflow_options.items():
            command_list.extend(
                ConvertUtils.command_option(keyword,
                                            value,
                                            quote_value=in_shell))
        #
        wdir = workflow_options.get("wdir")
        # NB this creates the appdir as well (wdir is within appdir)
        if not os.path.isdir(wdir):
            try:
                os.makedirs(wdir)
            except Exception:
                # No need to raise error - program will fail downstream
                logging.getLogger("HWR").error(
                    "Could not create GPhL working directory: %s", wdir)

        for ss0 in command_list:
            ss0 = ss0.split("=")[-1]
            if ss0.startswith(
                    "/") and "*" not in ss0 and not os.path.exists(ss0):
                logging.getLogger("HWR").warning("File does not exist : %s" %
                                                 ss0)

        logging.getLogger("HWR").info("GPhL execute :\n%s",
                                      " ".join(command_list))

        # Get environment variables
        envs = os.environ.copy()

        # # Trick to allow unauthorised account (e.g. ESRF: opid30) to run GPhL programs
        # # Any value is OK, just setting it is enough.
        # envs["AutoPROCWorkFlowUser"] = "******"

        # Hack to pass alternative installation dir for processing
        val = self.software_paths.get("gphl_wf_processing_installation")
        if val:
            envs["GPHL_PROC_INSTALLATION"] = val

        # These env variables are needed in some cases for wrapper scripts
        # Specifically for the stratcal wrapper.
        envs["GPHL_INSTALLATION"] = self.software_paths["GPHL_INSTALLATION"]
        envs["BDG_home"] = self.software_paths["BDG_home"]
        logging.getLogger("HWR").info(
            "Executing GPhL workflow, in environment %s", envs)
        try:
            self._running_process = subprocess.Popen(command_list, env=envs)
        except Exception:
            logging.getLogger().error("Error in spawning workflow application")
            raise

        logging.getLogger("py4j.clientserver").setLevel(logging.WARNING)
        self.set_state(States.RUNNING)

        logging.getLogger("HWR").debug(
            "GPhL workflow pid, returncode : %s, %s" %
            (self._running_process.pid, self._running_process.returncode))
    def start_workflow(self, workflow_queue, workflow_model_obj):

        # NBNB All command line option values are put in quotes (repr) when
        # the workflow is invoked remotely through ssh.

        self.workflow_queue = workflow_queue

        if self.get_state() != States.OFF:
            # NB, for now workflow is started as the connection is made,
            # so we are never in state 'ON'/STANDBY
            raise RuntimeError("Workflow is already running, cannot be started")

        self._workflow_name = workflow_model_obj.get_type()
        params = workflow_model_obj.get_workflow_parameters()

        in_shell = self.hasObject("ssh_options")
        if in_shell:
            dd0 = self["ssh_options"].getProperties().copy()
            #
            host = dd0.pop("Host")
            command_list = ["ssh"]
            if "ConfigFile" in dd0:
                command_list.extend(("-F", dd0.pop("ConfigFile")))
            for tag, val in sorted(dd0.items()):
                command_list.extend(("-o", "%s=%s" % (tag, val)))
                # command_list.extend(('-o', tag, val))
            command_list.append(host)
        else:
            command_list = []
        command_list.append(self.software_paths["java_binary"])

        # # HACK - debug options REMOVE!
        # import socket
        # sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        # sock.connect(("8.8.8.8", 80))
        # ss0 = "-agentlib:jdwp=transport=dt_socket,address=%s:8050,server=y,suspend=y"
        # command_list.append(ss0 % sock.getsockname()[0])

        for tag, val in sorted(params.get("invocation_properties", {}).items()):
            command_list.extend(
                ConvertUtils.java_property(tag, val, quote_value=in_shell)
            )

        # We must get hold of the options here, as we need wdir for a property
        workflow_options = dict(params.get("options", {}))
        calibration_name = workflow_options.get("calibration")
        if calibration_name:
            # Expand calibration base name - to simplify identification.
            workflow_options["calibration"] = "%s_%s" % (
                calibration_name,
                workflow_model_obj.get_name(),
            )
        path_template = workflow_model_obj.get_path_template()
        if "prefix" in workflow_options:
            workflow_options["prefix"] = path_template.base_prefix
        workflow_options["wdir"] = os.path.join(
            path_template.process_directory, self.getProperty("gphl_subdir")
        )
        # Hardcoded - location for log output
        command_list.extend(
            ConvertUtils.java_property(
                "co.gphl.wf.wdir", workflow_options["wdir"], quote_value=in_shell
            )
        )

        ll0 = ConvertUtils.command_option(
            "cp", self.software_paths["gphl_java_classpath"], quote_value=in_shell
        )
        command_list.extend(ll0)

        command_list.append(params["application"])

        for keyword, value in params.get("properties", {}).items():
            command_list.extend(
                ConvertUtils.java_property(keyword, value, quote_value=in_shell)
            )
        for keyword, value in self.java_properties.items():
            command_list.extend(
                ConvertUtils.java_property(keyword, value, quote_value=in_shell)
            )

        for keyword, value in workflow_options.items():
            command_list.extend(
                ConvertUtils.command_option(keyword, value, quote_value=in_shell)
            )
        #
        wdir = workflow_options.get("wdir")
        # NB this creates the appdir as well (wdir is within appdir)
        if not os.path.isdir(wdir):
            try:
                os.makedirs(wdir)
            except BaseException:
                # No need to raise error - program will fail downstream
                logging.getLogger("HWR").error(
                    "Could not create GPhL working directory: %s", wdir
                )

        for ss0 in command_list:
            ss0 = ss0.split("=")[-1]
            if ss0.startswith("/") and "*" not in ss0 and not os.path.exists(ss0):
                logging.getLogger("HWR").warning("File does not exist : %s" % ss0)

        logging.getLogger("HWR").info("GPhL execute :\n%s", " ".join(command_list))

        # Get environment variables
        envs = os.environ.copy()

        # # Trick to allow unauthorised account (e.g. ESRF: opid30) to run GPhL programs
        # # Any value is OK, just setting it is enough.
        # envs["AutoPROCWorkFlowUser"] = "******"

        # Hack to pass alternative installation dir for processing
        val = self.software_paths.get("gphl_wf_processing_installation")
        if val:
            envs["GPHL_PROC_INSTALLATION"] = val

        # These env variables are needed in some cases for wrapper scripts
        # Specifically for the stratcal wrapper.
        envs["GPHL_INSTALLATION"] = self.software_paths["GPHL_INSTALLATION"]
        envs["BDG_home"] = self.software_paths["BDG_home"]
        logging.getLogger("HWR").info(
            "Executing GPhL workflow, in environment %s", envs
        )
        try:
            self._running_process = subprocess.Popen(command_list, env=envs)
        except BaseException:
            logging.getLogger().error("Error in spawning workflow application")
            raise

        logging.getLogger("py4j.clientserver").setLevel(logging.WARNING)
        self.set_state(States.RUNNING)

        logging.getLogger("HWR").debug(
            "GPhL workflow pid, returncode : %s, %s"
            % (self._running_process.pid, self._running_process.returncode)
        )
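start_workflow assembles the Java command line from ConvertUtils.java_property (JVM -D properties) and ConvertUtils.command_option (application options), repr-quoting values when the workflow is launched remotely over ssh, as the leading comment notes. A plausible sketch of such helpers, inferred only from how they are called in these examples (the "-" default prefix follows the java -cp usage; the project's real implementations may differ):

def java_property(keyword, value, quote_value=False):
    """Return ['-D<keyword>=<value>'], repr-quoting the value for ssh transport if asked."""
    text = repr(str(value)) if quote_value else str(value)
    return ["-D%s=%s" % (keyword, text)]

def command_option(keyword, value, prefix="-", quote_value=False):
    """Return ['<prefix><keyword>', '<value>'] as separate command-line tokens."""
    text = repr(str(value)) if quote_value else str(value)
    return ["%s%s" % (prefix, keyword), text]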
Example #27
    def _create_task(self, sample, shape):
        tasks = []

        path_template = self._create_path_template(sample, self._path_template)
        path_template.num_files = 0
        path_template.compression = False

        workflow_hwobj = api.gphl_workflow
        if workflow_hwobj.get_state() == States.OFF:
            # The connection will be set up now - connect aboutToQuit to shutdown
            QtImport.QApplication.instance().aboutToQuit.connect(
                workflow_hwobj.shutdown
            )

            tree_brick = self._tree_brick
            if tree_brick:
                tree_brick.dc_tree_widget.confirm_dialog.continueClickedSignal.connect(
                    self.continue_button_click
                )

        wf = queue_model_objects.GphlWorkflow(workflow_hwobj)
        wf_type = ConvertUtils.text_type(self._workflow_cbox.currentText())
        wf.set_type(wf_type)

        if self.current_prefix:
            path_template.base_prefix = self.current_prefix
        wf.path_template = path_template
        wf.set_name(wf.path_template.get_prefix())
        wf.set_number(wf.path_template.run_number)

        wf_parameters = workflow_hwobj.get_available_workflows()[wf_type]
        strategy_type = wf_parameters.get("strategy_type")
        wf.set_interleave_order(wf_parameters.get("interleaveOrder", ""))
        if strategy_type.startswith("transcal"):
            pass

        elif strategy_type.startswith("diffractcal"):
            ss0 = self._gphl_diffractcal_widget.get_parameter_value("test_crystal")
            crystal_data = self._gphl_diffractcal_widget.test_crystals.get(ss0)
            wf.set_space_group(crystal_data.space_group)
            wf.set_cell_parameters(
                tuple(
                    getattr(crystal_data, tag)
                    for tag in ("a", "b", "c", "alpha", "beta", "gamma")
                )
            )
        else:
            # Could be native_..., phasing_..., etc.

            wf.set_space_group(
                self._gphl_acq_param_widget.get_parameter_value("space_group")
            )
            tag = self._gphl_acq_param_widget.get_parameter_value("crystal_system")
            crystal_system, point_group = None, None
            if tag:
                data = self._gphl_acq_param_widget._CRYSTAL_SYSTEM_DATA[tag]
                crystal_system = data.crystal_system
                point_groups = data.point_groups
                if len(point_groups) == 1 or point_groups[0] == "32":
                    # '32' is a special case; '312' and '321' are also returned as '32'
                    point_group = point_groups[0]
            wf.set_point_group(point_group)
            wf.set_crystal_system(crystal_system)
            wf.set_beam_energies(wf_parameters["beam_energies"])

        tasks.append(wf)

        return tasks
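Both _create_task variants map the crystal-system tag through _CRYSTAL_SYSTEM_DATA to a crystal system and, when unambiguous, a point group, with '32' handled specially. The same lookup logic, extracted into a stand-alone helper for clarity (the data table is passed in rather than reproduced here):

def resolve_point_group(tag, crystal_system_data):
    """Map a crystal-system tag to (crystal_system, point_group), or (None, None).

    crystal_system_data mirrors _CRYSTAL_SYSTEM_DATA: tag -> object with
    .crystal_system and .point_groups attributes.
    """
    crystal_system, point_group = None, None
    if tag:
        data = crystal_system_data[tag]
        crystal_system = data.crystal_system
        point_groups = data.point_groups
        # '32' is a special case; '312' and '321' are also returned as '32'
        if len(point_groups) == 1 or point_groups[0] == "32":
            point_group = point_groups[0]
    return crystal_system, point_group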