class TestPointGeneratorMeta(unittest.TestCase):

    def setUp(self):
        self.PGM = PointGeneratorMeta("test_description")

    def test_init(self):
        self.assertEqual("test_description", self.PGM.description)
        self.assertEqual(self.PGM.label, "")

    def test_validate(self):
        g = CompoundGenerator([MagicMock()], [], [])
        self.PGM.validate(g)

    @patch('malcolm.core.vmetas.pointgeneratormeta.CompoundGenerator.from_dict')
    def test_validate_dict_then_create_and_return(self, from_dict_mock):
        gen_mock = MagicMock()
        from_dict_mock.return_value = gen_mock
        d = dict()
        response = self.PGM.validate(d)
        from_dict_mock.assert_called_once_with(d)
        self.assertEqual(gen_mock, response)

    def test_validate_raises(self):
        with self.assertRaises(TypeError):
            self.PGM.validate(7)
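
# Usage sketch (an illustrative addition, not part of the scraped example):
# validate() passes a CompoundGenerator through and deserializes a dict via
# CompoundGenerator.from_dict, so both forms below yield a usable generator.
from scanpointgenerator import LineGenerator, CompoundGenerator
from malcolm.core.vmetas import PointGeneratorMeta

meta = PointGeneratorMeta("Scan trajectory")
line = LineGenerator("x", "mm", 0.0, 1.0, 5)
gen = CompoundGenerator([line], [], [])
meta.validate(gen)              # already a generator: accepted as-is
meta.validate(gen.to_dict())    # dict: rebuilt from its serialized form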
Example #2
class ScanTickerPart(ChildPart):
    # Generator instance
    generator = None
    # Where to start
    completed_steps = None
    # How many steps to do
    steps_to_do = None
    # When to blow up
    exception_step = None

    @RunnableController.Configure
    @RunnableController.PostRunReady
    @RunnableController.Seek
    @method_takes(
        "generator", PointGeneratorMeta("Generator instance"), REQUIRED,
        "axesToMove", StringArrayMeta(
            "List of axes in inner dimension of generator that should be moved"
        ), REQUIRED,
        "exceptionStep", NumberMeta(
            "int32", "If >0, raise an exception at the end of this step"), 0)
    def configure(self, task, completed_steps, steps_to_do, part_info, params):
        # If we are being asked to move
        if self.name in params.axesToMove:
            # Just store the generator and place we need to start
            self.generator = params.generator
            self.completed_steps = completed_steps
            self.steps_to_do = steps_to_do
            self.exception_step = params.exceptionStep
        else:
            # Flag nothing to do
            self.generator = None

    @RunnableController.Run
    @RunnableController.Resume
    def run(self, task, update_completed_steps):
        # Start time so everything is relative
        point_time = time.time()
        if self.generator:
            for i in range(self.completed_steps,
                           self.completed_steps + self.steps_to_do):
                # Get the point we are meant to be scanning
                point = self.generator.get_point(i)
                # Update the child counter to be the demand position
                position = point.positions[self.name]
                task.put(self.child["counter"], position)
                # Wait until the next point is due
                point_time += point.duration
                wait_time = point_time - time.time()
                task.sleep(wait_time)
                # Update the point as being complete
                update_completed_steps(i + 1, self)
                # If this is the exception step then blow up
                assert i + 1 != self.exception_step, \
                    "Raising exception at step %s" % self.exception_step
Example #3
from malcolm.core import RunnableStateMachine, REQUIRED, method_also_takes, \
    Hook, Info, AbortedError, BadValueError
from malcolm.core.vmetas import PointGeneratorMeta, NumberMeta, StringArrayMeta
from malcolm.controllers.managercontroller import ManagerController


class ParameterTweakInfo(Info):
    """Tweaks"""
    def __init__(self, parameter, value):
        self.parameter = parameter
        self.value = value
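
# Usage sketch (hypothetical values): a part can return this Info from the
# Validate hook to ask the controller to retry with an adjusted parameter.
tweak = ParameterTweakInfo("exposure", 0.1)
assert tweak.parameter == "exposure" and tweak.value == 0.1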


sm = RunnableStateMachine

configure_args = [
    "generator", PointGeneratorMeta("Generator instance"), REQUIRED,
    "axesToMove", StringArrayMeta(
        "List of axes in inner dimension of generator that should be moved"),
    []]


@method_also_takes("axesToMove",
                   StringArrayMeta("Default value for configure() axesToMove"),
                   [])
class RunnableController(ManagerController):
    """RunnableDevice implementer that also exposes GUI for child parts"""
    # The stateMachine that this controller implements
    stateMachine = sm()

    Validate = Hook()
Example #4
# user programs
NO_PROGRAM = 0       # Do nothing
TRIG_CAPTURE = 4     # Capture 1, Frame 0, Detector 0
TRIG_DEAD_FRAME = 2  # Capture 0, Frame 1, Detector 0
TRIG_LIVE_FRAME = 3  # Capture 0, Frame 1, Detector 1
TRIG_ZERO = 8        # Capture 0, Frame 0, Detector 0

# How many generator points to load each time
POINTS_PER_BUILD = 4000

# All possible PMAC CS axis assignment
cs_axis_names = list("ABCUVWXYZ")

# Args for configure and validate
configure_args = [
    "generator", PointGeneratorMeta("Generator instance"), REQUIRED,
    "axesToMove", StringArrayMeta(
        "List of axes in inner dimension of generator that should be moved"),
    []]
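
# Each argument above is a (name, meta, default) triple for method_takes:
# "generator" is mandatory (REQUIRED) while "axesToMove" defaults to [].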


# Class for these motor variables
class MotorInfo(Info):
    def __init__(self, cs_axis, cs_port, acceleration, resolution, offset,
                 max_velocity, current_position, scannable, velocity_settle):
        self.cs_axis = cs_axis
        self.cs_port = cs_port
        self.acceleration = acceleration
        self.resolution = resolution
        self.offset = offset
        self.max_velocity = max_velocity
        self.current_position = current_position
        self.scannable = scannable
        self.velocity_settle = velocity_settle
Example #5
class PositionLabellerPart(ChildPart):
    # Stored generator for positions
    generator = None
    # The last index we have loaded
    end_index = 0
    # Where we should stop loading points
    steps_up_to = 0
    # Future for plugin run
    start_future = None
    # If we are currently loading then block loading more points
    loading = False

    def _make_xml(self, start_index):

        # Make xml root
        root_el = ET.Element("pos_layout")
        dimensions_el = ET.SubElement(root_el, "dimensions")

        # Make an index for every hdf index
        for i in range(len(self.generator.dimensions)):
            ET.SubElement(dimensions_el, "dimension", name="d%d" % i)

        # Add a file close command for the HDF writer
        ET.SubElement(dimensions_el, "dimension", name="FilePluginClose")

        # Add the actual positions
        positions_el = ET.SubElement(root_el, "positions")

        end_index = start_index + POSITIONS_PER_XML
        if end_index > self.steps_up_to:
            end_index = self.steps_up_to

        for i in range(start_index, end_index):
            point = self.generator.get_point(i)
            do_close = (i == self.generator.size - 1)
            positions = dict(FilePluginClose="%d" % do_close)
            for j, value in enumerate(point.indexes):
                positions["d%d" % j] = str(value)
            position_el = ET.Element("position", **positions)
            positions_el.append(position_el)

        xml = et_to_string(root_el)
        xml_length = len(xml)
        assert xml_length < XML_MAX_SIZE, "XML size %d too big" % xml_length
        return xml, end_index
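
    # For reference, the generated document has this shape (a hand-written
    # sketch for a 2-dimensional generator, not captured plugin output):
    #
    #   <pos_layout>
    #     <dimensions>
    #       <dimension name="d0" />
    #       <dimension name="d1" />
    #       <dimension name="FilePluginClose" />
    #     </dimensions>
    #     <positions>
    #       <position FilePluginClose="0" d0="0" d1="0" />
    #       ...
    #     </positions>
    #   </pos_layout>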

    @RunnableController.Reset
    def reset(self, task):
        super(PositionLabellerPart, self).reset(task)
        self.abort(task)

    @RunnableController.Configure
    @RunnableController.PostRunReady
    @RunnableController.Seek
    @method_takes("generator", PointGeneratorMeta("Generator instance"),
                  REQUIRED)
    def configure(self, task, completed_steps, steps_to_do, part_info, params):
        # clear out old subscriptions
        task.unsubscribe_all()
        self.generator = params.generator
        # Delete any remaining old positions
        futures = task.post_async(self.child["delete"])
        futures += task.put_many_async(
            self.child, dict(enableCallbacks=True,
                             idStart=completed_steps + 1))
        self.steps_up_to = completed_steps + steps_to_do
        xml, self.end_index = self._make_xml(completed_steps)
        # Wait for the previous puts to finish
        task.wait_all(futures)
        # Put the xml
        task.put(self.child["xml"], xml)
        # Start the plugin
        self.start_future = task.post_async(self.child["start"])

    @RunnableController.Run
    @RunnableController.Resume
    def run(self, task, update_completed_steps):
        self.loading = False
        task.subscribe(self.child["qty"], self.load_more_positions, task)
        task.wait_all(self.start_future)

    def load_more_positions(self, number_left, task):
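        # Only load another block if one is not already in flight, there are
        # points left before steps_up_to, and the plugin's queue of unconsumed
        # positions has dropped below N_LOAD_AHEAD blocks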
        if not self.loading and self.end_index < self.steps_up_to and \
                number_left < POSITIONS_PER_XML * N_LOAD_AHEAD:
            self.loading = True
            xml, self.end_index = self._make_xml(self.end_index)
            task.put(self.child["xml"], xml)
            self.loading = False

    @RunnableController.Abort
    @RunnableController.Pause
    def abort(self, task):
        task.post(self.child["stop"])
Example #7
class HDFWriterPart(ChildPart):
    # Attributes
    datasets = None

    # Future for the start action
    start_future = None
    array_future = None
    done_when_reaches = 0

    def _create_dataset_infos(self, part_info, generator, filename):
        # Update the dataset table
        uniqueid = "/entry/NDAttributes/NDArrayUniqueId"
        generator_rank = len(generator.dimensions)

        # Get the detector name from the primary source
        ndarray_infos = NDArrayDatasetInfo.filter_values(part_info)
        assert len(ndarray_infos) in (0, 1), \
            "More than one NDArrayDatasetInfo defined %s" % ndarray_infos

        # Add the primary datasource
        if ndarray_infos:
            ndarray_info = ndarray_infos[0]
            yield DatasetProducedInfo(name="%s.data" % ndarray_info.name,
                                      filename=filename,
                                      type="primary",
                                      rank=ndarray_info.rank + generator_rank,
                                      path="/entry/detector/detector",
                                      uniqueid=uniqueid)

            # Add any secondary datasources
            for calculated_info in \
                    CalculatedNDAttributeDatasetInfo.filter_values(part_info):
                yield DatasetProducedInfo(
                    name="%s.%s" % (ndarray_info.name, calculated_info.name),
                    filename=filename,
                    type="secondary",
                    rank=ndarray_info.rank + generator_rank,
                    path="/entry/%s/%s" %
                    (calculated_info.name, calculated_info.name),
                    uniqueid=uniqueid)

        # Add all the other datasources
        for dataset_info in NDAttributeDatasetInfo.filter_values(part_info):
            if dataset_info.type == "detector":
                # Something like I0
                name = "%s.data" % dataset_info.name
                type = "primary"
            elif dataset_info.type == "monitor":
                # Something like Iref
                name = "%s.data" % dataset_info.name
                type = "monitor"
            elif dataset_info.type == "position":
                # Something like x
                name = "%s.value" % dataset_info.name
                type = "position_value"
            else:
                raise AttributeError(
                    "Bad dataset type %r, should be in %s" %
                    (dataset_info.type, attribute_dataset_types))
            yield DatasetProducedInfo(name=name,
                                      filename=filename,
                                      type=type,
                                      rank=dataset_info.rank + generator_rank,
                                      path="/entry/%s/%s" %
                                      (dataset_info.name, dataset_info.name),
                                      uniqueid=uniqueid)

        # Add any setpoint dimensions
        for dim in generator.axes:
            yield DatasetProducedInfo(name="%s.value_set" % dim,
                                      filename=filename,
                                      type="position_set",
                                      rank=1,
                                      path="/entry/detector/%s_set" % dim,
                                      uniqueid="")

    @RunnableController.Reset
    def reset(self, task):
        super(HDFWriterPart, self).reset(task)
        self.abort(task)

    @RunnableController.Configure
    @method_takes("generator", PointGeneratorMeta("Generator instance"),
                  REQUIRED, "filePath",
                  StringMeta("File path to write data to"), REQUIRED)
    def configure(self, task, completed_steps, steps_to_do, part_info, params):
        self.done_when_reaches = completed_steps + steps_to_do
        # For first run then open the file
        # Enable position mode before setting any position related things
        task.put(self.child["positionMode"], True)
        # Setup our required settings
        # TODO: this should be different for Windows detectors
        file_path = params.filePath.rstrip(os.sep)
        file_dir, filename = file_path.rsplit(os.sep, 1)
        assert "." in filename, \
            "File extension for %r should be supplied" % filename
        futures = task.put_many_async(
            self.child,
            dict(enableCallbacks=True,
                 fileWriteMode="Stream",
                 swmrMode=True,
                 positionMode=True,
                 dimAttDatasets=True,
                 lazyOpen=True,
                 arrayCounter=0,
                 filePath=file_dir + os.sep,
                 fileName=filename,
                 fileTemplate="%s%s"))
        futures += self._set_dimensions(task, params.generator)
        xml = self._make_layout_xml(params.generator, part_info)
        layout_filename = os.path.join(file_dir,
                                       "%s-layout.xml" % self.params.mri)
        with open(layout_filename, "w") as layout_file:
            layout_file.write(xml)
        futures += task.put_async(self.child["xml"], layout_filename)
        # Wait for the previous puts to finish
        task.wait_all(futures)
        # Reset numCapture back to 0
        task.put(self.child["numCapture"], 0)
        # We want the HDF writer to flush this often:
        flush_time = 1  # seconds
        # (In particular this means that HDF files can be read cleanly by
        # SciSoft at the start of a scan.)
        assert params.generator.duration > 0, \
            "Duration %s for generator must be >0 to signify constant exposure"\
            % params.generator.duration
        n_frames_between_flushes = max(
            2, round(flush_time / params.generator.duration))
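        # Worked example: a 0.1 s frame gives round(1 / 0.1) = 10, so flush
        # every 10 frames; a 1 s frame gives round(1) = 1, clamped up to 2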
        task.put(self.child["flushDataPerNFrames"], n_frames_between_flushes)
        task.put(self.child["flushAttrPerNFrames"], n_frames_between_flushes)

        # Start the plugin
        self.start_future = task.post_async(self.child["start"])
        # Start a future waiting for the first array
        self.array_future = task.when_matches_async(self.child["arrayCounter"],
                                                    1)
        # Return the dataset information
        dataset_infos = list(
            self._create_dataset_infos(part_info, params.generator, filename))
        return dataset_infos

    @RunnableController.PostRunReady
    @RunnableController.Seek
    def seek(self, task, completed_steps, steps_to_do, part_info):
        self.done_when_reaches = completed_steps + steps_to_do
        # Just reset the array counter
        task.put(self.child["arrayCounter"], 0)
        # Start a future waiting for the first array
        self.array_future = task.when_matches_async(self.child["arrayCounter"],
                                                    1)

    @RunnableController.Run
    @RunnableController.Resume
    def run(self, task, update_completed_steps):
        task.wait_all(self.array_future)
        task.unsubscribe_all()
        task.subscribe(self.child["uniqueId"], update_completed_steps, self)
        # TODO: what happens if we miss the last frame?
        task.when_matches(self.child["uniqueId"], self.done_when_reaches)

    @RunnableController.PostRunIdle
    def post_run_idle(self, task):
        # If this is the last one, wait until the file is closed
        task.wait_all(self.start_future)

    @RunnableController.Abort
    def abort(self, task):
        task.post(self.child["stop"])

    def _set_dimensions(self, task, generator):
        num_dims = len(generator.dimensions)
        assert num_dims <= 10, \
            "Can only do 10 dims, you gave me %s" % num_dims
        attr_dict = dict(numExtraDims=num_dims - 1)
        # Fill in dim name and size
        # NOTE: HDF writer has these filled with fastest moving first
        # while dimensions is slowest moving first
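        # e.g. dimension sizes [5, 3] (slowest first) give the first suffix
        # d1 (size 3, fastest moving) and the second d0 (size 5); leftover
        # suffixes get an empty name and size 1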
        for i in range(10):
            suffix = SUFFIXES[i]
            if i < num_dims:
                forward_i = num_dims - i - 1
                index_name = "d%d" % forward_i
                index_size = generator.dimensions[forward_i].size
            else:
                index_name = ""
                index_size = 1
            attr_dict["posNameDim%s" % suffix] = index_name
            attr_dict["extraDimSize%s" % suffix] = index_size
        futures = task.put_many_async(self.child, attr_dict)
        return futures

    def _make_nxdata(self, name, rank, entry_el, generator, link=False):
        # Make a dataset for the data
        data_el = ET.SubElement(entry_el, "group", name=name)
        ET.SubElement(data_el,
                      "attribute",
                      name="signal",
                      source="constant",
                      value=name,
                      type="string")
        pad_dims = []
        for d in generator.dimensions:
            if len(d.axes) == 1:
                pad_dims.append("%s_set" % d.axes[0])
            else:
                pad_dims.append(".")

        pad_dims += ["."] * rank
        ET.SubElement(data_el,
                      "attribute",
                      name="axes",
                      source="constant",
                      value=",".join(pad_dims),
                      type="string")
        ET.SubElement(data_el,
                      "attribute",
                      name="NX_class",
                      source="constant",
                      value="NXdata",
                      type="string")
        # Add in the indices into the dimensions array that our axes refer to
        for i, d in enumerate(generator.dimensions):
            for axis in d.axes:
                ET.SubElement(data_el,
                              "attribute",
                              name="%s_set_indices" % axis,
                              source="constant",
                              value=str(i),
                              type="string")
                if link:
                    ET.SubElement(data_el,
                                  "hardlink",
                                  name="%s_set" % axis,
                                  target="/entry/detector/%s_set" % axis)
                else:
                    self._make_set_points(d, axis, data_el,
                                          generator.units[axis])
        return data_el

    def _make_set_points(self, dimension, axis, data_el, units):
        axis_vals = ["%.12g" % p for p in dimension.get_positions(axis)]
        axis_el = ET.SubElement(data_el,
                                "dataset",
                                name="%s_set" % axis,
                                source="constant",
                                type="float",
                                value=",".join(axis_vals))
        ET.SubElement(axis_el,
                      "attribute",
                      name="units",
                      source="constant",
                      value=units,
                      type="string")

    def _make_layout_xml(self, generator, part_info):
        # Make a root element with an NXEntry
        root_el = ET.Element("hdf5_layout")
        entry_el = ET.SubElement(root_el, "group", name="entry")
        ET.SubElement(entry_el,
                      "attribute",
                      name="NX_class",
                      source="constant",
                      value="NXentry",
                      type="string")

        # Find the primary source of detector data, if any
        ndarray_infos = NDArrayDatasetInfo.filter_values(part_info)
        if not ndarray_infos:
            # Still need to put the data in the file, so manufacture something
            primary_rank = 1
        else:
            primary_rank = ndarray_infos[0].rank

        # Make an NXData element with the detector data in it in
        # /entry/detector/detector
        data_el = self._make_nxdata("detector", primary_rank, entry_el,
                                    generator)
        det_el = ET.SubElement(data_el,
                               "dataset",
                               name="detector",
                               source="detector",
                               det_default="true")
        ET.SubElement(det_el,
                      "attribute",
                      name="NX_class",
                      source="constant",
                      value="SDS",
                      type="string")

        # Now add any calculated sources of data
        for dataset_info in \
                CalculatedNDAttributeDatasetInfo.filter_values(part_info):
            # if we are a secondary source, use the same rank as the det
            attr_el = self._make_nxdata(dataset_info.name,
                                        primary_rank,
                                        entry_el,
                                        generator,
                                        link=True)
            ET.SubElement(attr_el,
                          "dataset",
                          name=dataset_info.name,
                          source="ndattribute",
                          ndattribute=dataset_info.attr)

        # And then any other attribute sources of data
        for dataset_info in NDAttributeDatasetInfo.filter_values(part_info):
            # if we are a secondary source, use the same rank as the det
            attr_el = self._make_nxdata(dataset_info.name,
                                        dataset_info.rank,
                                        entry_el,
                                        generator,
                                        link=True)
            ET.SubElement(attr_el,
                          "dataset",
                          name=dataset_info.name,
                          source="ndattribute",
                          ndattribute=dataset_info.attr)

        # Add a group for attributes
        NDAttributes_el = ET.SubElement(entry_el,
                                        "group",
                                        name="NDAttributes",
                                        ndattr_default="true")
        ET.SubElement(NDAttributes_el,
                      "attribute",
                      name="NX_class",
                      source="constant",
                      value="NXcollection",
                      type="string")
        xml = et_to_string(root_el)
        return xml