def test_part_becomes_faulty_with_BadValueError_on_init(self, mock_on_init):
    mock_context = Mock(name="context_mock")
    mock_on_init.side_effect = BadValueError()

    self.detector_part.on_init(mock_context)

    mock_on_init.assert_called_once_with(mock_context)
    self.assertEqual(True, self.detector_part.faulty)

def test_on_run_raises_run_future_exception_when_child_is_in_fault(self):
    mock_context = Mock(name="context_mock")
    mock_child = Mock(name="child_mock")
    mock_run_future = Mock(name="run_future_mock")
    mock_run_future.exception.return_value = TimeoutError()
    mock_child.state = MockChildState([RunnableStates.ARMED, RunnableStates.FAULT])
    mock_child.run_async.return_value = mock_run_future
    mock_context.block_view.return_value = mock_child
    mock_context.wait_all_futures.side_effect = BadValueError()

    self.assertRaises(TimeoutError, self.detector_part.on_run, mock_context)

def await_ioc_start(stats, prefix):
    cothread.Yield()
    pid_rbv = catools.caget(f"{prefix}:PID", timeout=5)
    if int(pid_rbv) != os.getpid():
        raise BadValueError(
            "Got back different PID: "
            + "is there another system instance on the machine?"
        )
    catools.caput(
        f"{prefix}:YAML:PATH", stats["yaml_path"], datatype=catools.DBR_CHAR_STR
    )
    catools.caput(
        f"{prefix}:PYMALCOLM:PATH",
        stats["pymalcolm_path"],
        datatype=catools.DBR_CHAR_STR,
    )

def start_ioc(stats, prefix):
    db_macros = "prefix='%s'" % prefix
    try:
        epics_base = os.environ["EPICS_BASE"]
    except KeyError:
        raise BadValueError("EPICS base not defined in environment")
    softIoc_bin = epics_base + "/bin/linux-x86_64/softIoc"
    for key, value in stats.items():
        db_macros += ",%s='%s'" % (key, value)
    root = os.path.split(os.path.dirname(os.path.abspath(__file__)))[0]
    db_template = os.path.join(root, "db", "system.template")
    ioc = subprocess.Popen(
        [softIoc_bin, "-m", db_macros, "-d", db_template],
        stdout=subprocess.PIPE,
        stdin=subprocess.PIPE,
    )
    cothread.Spawn(await_ioc_start, stats, prefix)
    return ioc

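# A minimal usage sketch, not from the source: start the system IOC, give the
# spawned await_ioc_start coroutine time to verify the PID and publish the
# paths, then stop the softIoc. The prefix and stats values are hypothetical.
def demo_start_ioc():
    stats = {
        "yaml_path": "/tmp/scan.yaml",  # hypothetical path
        "pymalcolm_path": "/tmp/pymalcolm",  # hypothetical path
    }
    ioc = start_ioc(stats, "TEST:SYSTEM")
    try:
        cothread.Sleep(2)  # let await_ioc_start check the PID and set paths
    finally:
        ioc.terminate()  # start_ioc returns a plain subprocess.Popen
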
def __init__(
    self,
    name: util.APartName,
    description: util.AMetaDescription,
    pv_list: util.APvList = (),
    name_list: util.ANameList = (),
    min_delta: util.AMinDelta = 0.05,
    timeout: util.ATimeout = DEFAULT_TIMEOUT,
    widget: util.AWidget = Widget.PLOT,
    group: util.AGroup = None,
    config: util.AConfig = True,
    display_from_pv: util.AGetLimits = True,
) -> None:
    if len(pv_list) != len(name_list):
        raise BadValueError("List of PVs must be same length as list of names!")
    super().__init__(name)
    self.display_from_pv = display_from_pv
    elements = {}
    # Use a distinct loop variable so the "name" parameter isn't shadowed
    for element_name in name_list:
        elements[element_name] = NumberArrayMeta(
            "float64", element_name, tags=[Widget.TEXTUPDATE.tag()]
        )
    self.name_list = name_list
    self.pv_list = pv_list
    self.caa = util.WaveformTableAttribute(
        TableMeta(description, writeable=False, elements=elements),
        util.catools.DBR_DOUBLE,
        pv_list,
        name_list,
        min_delta,
        timeout,
        widget,
        group,
        config,
        on_connect=self._update_display,
    )

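# Hypothetical construction sketch for the part defined above. The class name
# "WaveformTablePart" and the PV/column names are assumptions for
# illustration; only the argument shapes come from __init__ above.
def demo_waveform_table_part():
    return WaveformTablePart(  # class name assumed
        name="currents",
        description="Ring and beamline current history",
        pv_list=("SR:CURRENT:WF", "BL:CURRENT:WF"),  # hypothetical PVs
        name_list=("ringCurrent", "beamlineCurrent"),
    )
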
def on_validate(
    self,
    context: AContext,
    part_info: APartInfo,
    generator: AGenerator,
    fileDir: AFileDir,
    detectors: ADetectorTable = None,
    axesToMove: AAxesToMove = None,
    breakpoints: ABreakpoints = None,
    fileTemplate: AFileTemplate = "%s.h5",
) -> UParameterTweakInfos:
    # Work out if we are taking part
    enable, frames_per_step, kwargs = self._configure_args(
        generator, fileDir, detectors, axesToMove, breakpoints, fileTemplate
    )
    ret = []
    tweak_detectors = False
    assert detectors, "No detectors"
    if self.name not in detectors.name:
        # There isn't a row for us, so add one in on validate, it will be
        # disabled but that is truthful
        tweak_detectors = True
    child = context.block_view(self.mri)
    takes_exposure = "exposure" in child.validate.meta.takes.elements

    def do_validate(**params):
        if not takes_exposure:
            params.pop("exposure", None)
        try:
            return child.validate(**params)
        except Exception as e:
            raise BadValueError(
                "Validate of %s failed: %s" % (self.mri, stringify_error(e))
            )

    # Check something else is multiplying out triggers
    multiframe: List[DetectorMutiframeInfo] = []
    info: DetectorMutiframeInfo
    for info in DetectorMutiframeInfo.filter_values(part_info):
        if cast(DetectorMutiframeInfo, info).mri == self.mri:
            multiframe.append(info)
    if enable:
        if self.faulty:
            raise BadValueError(
                "Detector %s was faulty at init and is unusable. If the "
                "detector is now working please restart Malcolm" % self.name
            )
        # Check that if we are told to set exposure that we take it
        if "exposure" in kwargs and not multiframe and not takes_exposure:
            raise BadValueError("Detector %s doesn't take exposure" % self.name)
        # If asked to guess frames per step, do so
        if frames_per_step < 1:
            if kwargs.get("exposure", 0) == 0:
                # Asked to guess both
                frames_per_step = 1
            else:
                # Exposure given, so run a validate once without the
                # multiplier and see what the detector gives us
                exposure = kwargs.pop("exposure")
                returns = do_validate(**kwargs)
                dead_time = generator.duration - returns["exposure"]
                frames_per_step = generator.duration // (exposure + dead_time)
                kwargs["exposure"] = exposure
            tweak_detectors = True
        if frames_per_step > 1 and not multiframe:
            raise BadValueError(
                "There are no trigger multipliers setup for Detector '%s' "
                "so framesPerStep can only be 0 or 1 for this row in the "
                "detectors table" % self.name
            )
        # This is a Serializable with the correct entries
        returns = do_validate(**kwargs)
        # Add in the exposure in case it is returned
        exposure = kwargs.setdefault("exposure", 0.0)
        # TODO: this will fail if we split across 2 Malcolm processes as
        # scanpointgenerators don't compare equal, but we don't want to
        # serialize everything as that is expensive for arrays
        for k in returns:
            v = returns[k]
            if kwargs.get(k, v) != v:
                if k == "exposure":
                    exposure = v
                    tweak_detectors = True
                else:
                    ret.append(ParameterTweakInfo(k, v))
    else:
        exposure = 0.0
    if tweak_detectors:
        # Detector table changed, make a new one
        det_row = [enable, self.name, self.mri, exposure, frames_per_step]
        rows = []
        assert detectors, "No detectors"
        append_det_row = True
        for row in detectors.rows():
            if row[1] == self.name:
                rows.append(det_row)
                append_det_row = False
            else:
                rows.append(row)
        if append_det_row:
            rows.append(det_row)
        new_detectors = DetectorTable.from_rows(rows)
        ret.append(ParameterTweakInfo("detectors", new_detectors))
    return ret

def on_configure(
    self,
    context: scanning.hooks.AContext,
    generator: scanning.hooks.AGenerator,
    detectors: scanning.util.ADetectorTable = None,
) -> None:
    assert generator.duration > 0, (
        "Can only create pulse triggers for a generator with the same "
        "duration for every point, not %s" % generator
    )
    self.generator_duration = generator.duration
    # Get the panda and the detector we will be using
    child = context.block_view(self.mri)
    panda_mri = child.panda.value
    self.panda = context.block_view(panda_mri)
    detector_mri = child.detector.value
    self.detector = context.block_view(detector_mri)
    # Get the framesPerStep for this detector from the detectors table
    assert detectors, "No detectors passed in table"
    for enable, _, mri, _, frames_per_step in detectors.rows():
        if mri == detector_mri:
            # Found a row telling us how many frames per step to generate
            if enable:
                assert frames_per_step > 0, (
                    "Zero frames per step for %s, how did this happen?" % mri
                )
                self.frames_per_step = frames_per_step
            else:
                self.frames_per_step = 0
            break
    else:
        raise BadValueError(
            "Detector table %s doesn't contain row for %s"
            % (detectors, detector_mri)
        )
    # Check that the Attributes we expect are exported
    pulse_name = None
    suffixes = ["Pulses", "Width", "Step", "Delay"]
    expected_exports = set(self.name + s for s in suffixes)
    assert self.panda, "No assigned PandA"
    for source, export in self.panda.exports.value.rows():
        if export in expected_exports:
            part_name = source.split(".")[0]
            if pulse_name:
                assert part_name == pulse_name, (
                    "Export %s defined for a different pulse block" % export
                )
            else:
                pulse_name = part_name
            expected_exports.remove(export)
    assert not expected_exports, "PandA %r did not define exports %s" % (
        panda_mri,
        sorted(expected_exports),
    )
    # Find the PULSE Block for further checks
    pulse_mri: Optional[str] = None
    assert self.panda, "No assigned PandA"
    for name, mri, _, _, _ in self.panda.layout.value.rows():
        if name == pulse_name:
            pulse_mri = mri
    assert pulse_mri, "Can't find mri for pulse block %r" % pulse_name
    # Check that the Attributes have the right units for all except Pulses
    pulse_block = context.block_view(pulse_mri)
    for suffix in suffixes:
        if suffix != "Pulses":
            units = pulse_block[suffix.lower() + "Units"].value
            assert units == "s", (
                "Pulse block %r attribute %r needs units 's', not %r"
                % (panda_mri, suffix, units)
            )

def create_vds(generator, raw_name, vds_path, child, uid_name, sum_name):
    vds_folder, vds_name = os.path.split(vds_path)
    image_width = int(child.imageWidth.value)
    image_height = int(child.imageHeight.value)
    block_size = int(child.blockSize.value)
    hdf_count = int(child.numProcesses.value)
    data_type = str(child.dataType.value)
    # hdf_shape tuple represents the number of images in each file
    hdf_shape = files_shape(generator.size, block_size, hdf_count)
    # The first dimension alternating has no meaning. If any subsequent ones
    # alternate then it will radically slow down the VDS creation and reading.
    # We rely on a scanning.parts.UnrollingPart to have unrolled any such scan
    # into one long line before we get here.
    if any(dim.alternate for dim in generator.dimensions[1:]):
        raise BadValueError(
            "Snake scans are not supported as the VDS is not performant. You "
            "can add a scanning.parts.UnrollingPart to the top level scan "
            "block to unroll the scan into one long line"
        )
    alternates = None
    files = [
        os.path.join(vds_folder, f"{raw_name}_{i + 1:06d}.h5")
        for i in range(hdf_count)
    ]
    shape = (hdf_shape, image_height, image_width)
    # prepare a vds for the image data
    one_vds(
        vds_folder,
        vds_name,
        files,
        image_width,
        image_height,
        shape,
        generator,
        alternates,
        block_size,
        "data",
        "data",
        data_type.lower(),
    )
    shape = (hdf_shape, 1, 1)
    # prepare a vds for the unique IDs
    one_vds(
        vds_folder,
        vds_name,
        files,
        1,
        1,
        shape,
        generator,
        alternates,
        block_size,
        uid_name,
        "uid",
        "uint64",
    )
    # prepare a vds for the sums
    one_vds(
        vds_folder,
        vds_name,
        files,
        1,
        1,
        shape,
        generator,
        alternates,
        block_size,
        sum_name,
        "sum",
        "uint64",
    )

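# A call sketch, not from the source: drive create_vds with a prepared
# scanpointgenerator CompoundGenerator and a mocked child block exposing the
# attributes read above. All names and values here are illustrative
# assumptions; the mock stands in for the detector writer child block.
from unittest.mock import Mock

from scanpointgenerator import CompoundGenerator, LineGenerator

def demo_create_vds():
    line = LineGenerator("x", "mm", 0.0, 1.0, 1000)
    generator = CompoundGenerator([line], [], [], duration=0.1)
    generator.prepare()  # populates generator.size and generator.dimensions
    child = Mock()
    child.imageWidth.value = 2048
    child.imageHeight.value = 1536
    child.blockSize.value = 1
    child.numProcesses.value = 4
    child.dataType.value = "uint16"
    create_vds(generator, "raw", "/tmp/scan_vds.h5", child, "uid", "sum")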