def make_meta(subtyp, description, tags, writeable=True, labels=None):
    if subtyp == "enum":
        if writeable:
            widget_type = "combo"
        else:
            widget_type = "textupdate"
        tags.append(widget(widget_type))
        meta = ChoiceMeta(description, labels, tags)
    elif subtyp == "bit":
        if writeable:
            widget_type = "checkbox"
        else:
            widget_type = "led"
        tags.append(widget(widget_type))
        meta = BooleanMeta(description, tags)
    else:
        if writeable:
            widget_type = "textinput"
        else:
            widget_type = "textupdate"
        tags.append(widget(widget_type))
        if subtyp == "uint":
            meta = NumberMeta("uint32", description, tags)
        elif subtyp == "int":
            meta = NumberMeta("int32", description, tags)
        elif subtyp == "scalar":
            meta = NumberMeta("float64", description, tags)
        elif subtyp == "lut":
            meta = StringMeta(description, tags)
        elif subtyp in ("pos", "relative_pos"):
            meta = NumberMeta("float64", description, tags)
        else:
            raise ValueError("Unknown subtype %r" % subtyp)
    return meta
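# --- Usage sketch (illustrative, not from the source; the field descriptions
# below are made up). Note that make_meta appends a widget tag to the tags
# list it is given, so pass a fresh list per call.
enum_meta = make_meta("enum", "Trigger source", tags=[],
                      labels=["None", "Rising", "Falling"])
assert isinstance(enum_meta, ChoiceMeta)
bit_meta = make_meta("bit", "Output enabled", tags=[], writeable=False)
assert isinstance(bit_meta, BooleanMeta)
uint_meta = make_meta("uint", "Frame counter", tags=[])
assert isinstance(uint_meta, NumberMeta) and uint_meta.dtype == "uint32"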
def create_attribute_models(self):
    for data in super(ExposureDetectorDriverPart,
                      self).create_attribute_models():
        yield data
    # Create writeable attribute for how long we should allow for detector
    # read out
    meta = NumberMeta("float64", "Time taken to readout detector",
                      tags=[widget("textinput"), config()])
    self.readout_time = meta.create_attribute_model(self.params.readoutTime)
    yield "readoutTime", self.readout_time, self.readout_time.set_value
def _make_scale_offset(self, field_name):
    group_tag = self._make_group("outputs")
    meta = StringMeta("Units for position fields on this block",
                      tags=[group_tag, widget("textinput")])
    self._make_field_part(field_name + ".UNITS", meta, writeable=True)
    meta = NumberMeta("float64", "Scale for block position fields",
                      tags=[group_tag, widget("textinput")])
    self._make_field_part(field_name + ".SCALE", meta, writeable=True)
    meta = NumberMeta("float64", "Offset for block position fields",
                      tags=[group_tag, widget("textinput")])
    self._make_field_part(field_name + ".OFFSET", meta, writeable=True)
def create_attribute_models(self):
    for data in super(PmacTrajectoryPart, self).create_attribute_models():
        yield data
    # Create writeable attribute for the minimum time to leave when there
    # is a gap between frames
    meta = NumberMeta("float64", "Min time for any gaps between frames",
                      tags=[widget("textinput"), config()])
    self.min_turnaround = meta.create_attribute_model(
        self.params.minTurnaround)
    yield "minTurnaround", self.min_turnaround, self.min_turnaround.set_value
class HelloPart(Part):
    """Defines greet and error `Method` objects on a `Block`"""

    def __init__(self, params):
        super(HelloPart, self).__init__(params.name)

    @method_takes(
        "name", StringMeta("a name"), REQUIRED,
        "sleep", NumberMeta("float64", "Time to wait before returning"), 0,
    )
    @method_returns(
        "greeting", StringMeta(description="a greeting"), REQUIRED)
    def greet(self, parameters, return_map):
        """Optionally sleep <sleep> seconds, then return a greeting to
        <name>"""
        print("Manufacturing greeting...")
        time.sleep(parameters.sleep)
        return_map.greeting = "Hello %s" % parameters.name
        return return_map

    @method_takes()
    def error(self):
        """Raise an error"""
        raise RuntimeError("You called method error()")
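# --- Usage sketch (illustrative, not from the source): @method_takes and
# @method_returns annotate greet with metas, but the underlying method still
# takes a parameters map and a return map. The stand-in types below are
# hypothetical substitutes for the Maps the framework would normally build.
from collections import namedtuple

GreetArgs = namedtuple("GreetArgs", "name sleep")  # hypothetical

class GreetReturn(object):  # hypothetical mutable return map
    greeting = None

hello = HelloPart(namedtuple("PartParams", "name")(name="hello"))
ret = hello.greet(GreetArgs(name="me", sleep=0), GreetReturn())
assert ret.greeting == "Hello me"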
class ScanTickerPart(ChildPart):
    """Provides control of a `counter_block` within a `RunnableController`"""
    # Generator instance
    generator = None
    # Where to start
    completed_steps = None
    # How many steps to do
    steps_to_do = None
    # When to blow up
    exception_step = None

    @RunnableController.Configure
    @RunnableController.PostRunArmed
    @RunnableController.Seek
    @method_takes(
        "generator", PointGeneratorMeta("Generator instance"), REQUIRED,
        "axesToMove", StringArrayMeta(
            "List of axes in inner dimension of generator that should be "
            "moved"), REQUIRED,
        "exceptionStep", NumberMeta(
            "int32", "If >0, raise an exception at the end of this step"),
        0)
    def configure(self, context, completed_steps, steps_to_do, part_info,
                  params):
        # If we are being asked to move
        if self.name in params.axesToMove:
            # Just store the generator and place we need to start
            self.generator = params.generator
            self.completed_steps = completed_steps
            self.steps_to_do = steps_to_do
            self.exception_step = params.exceptionStep
        else:
            # Flag nothing to do
            self.generator = None

    @RunnableController.Run
    @RunnableController.Resume
    def run(self, context, update_completed_steps):
        # Start time so everything is relative
        point_time = time.time()
        if self.generator:
            child = context.block_view(self.params.mri)
            for i in range(self.completed_steps,
                           self.completed_steps + self.steps_to_do):
                self.log.debug("Starting point %s", i)
                # Get the point we are meant to be scanning
                point = self.generator.get_point(i)
                # Update the child counter_block to be the demand position
                position = point.positions[self.name]
                child.counter.put_value(position)
                # Wait until the next point is due
                point_time += point.duration
                wait_time = point_time - time.time()
                self.log.debug("%s Sleeping %s", self.name, wait_time)
                context.sleep(wait_time)
                # Update the point as being complete
                update_completed_steps(i + 1, self)
                # If this is the exception step then blow up
                assert i + 1 != self.exception_step, \
                    "Raising exception at step %s" % self.exception_step
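# --- Timing note (illustrative sketch, not from the source): run() above
# accumulates point_time rather than sleeping point.duration each iteration,
# so per-point overhead is compensated instead of accumulating as drift.
# The same pattern in plain Python:
import time

point_time = time.time()
for duration in [0.1, 0.1, 0.1]:
    point_time += duration
    # If the loop body ran slow, the computed wait shrinks to compensate
    wait_time = point_time - time.time()
    if wait_time > 0:
        time.sleep(wait_time)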
def test_from_dict(self):
    nm = NumberMeta.from_dict(self.serialized)
    assert type(nm) == NumberMeta
    assert nm.description == "desc"
    assert nm.dtype == "float64"
    assert nm.tags == ()
    assert not nm.writeable
    assert nm.label == "name"
def setUp(self):
    n = NumberMeta(description="a number")
    s = StringMeta(description="a string")
    self.meta = MapMeta()
    self.meta.set_elements({"a": s, "b": s})
    self.meta.set_required(["a"])
    self.nmeta = MapMeta()
    self.nmeta.set_elements({"a": n, "b": n})
    self.nmeta.set_required(["a"])
class GapPluginPart(StatefulChildPart):
    """Gap plugin for setting the fill value"""

    @RunnableController.Configure
    @method_takes(
        "fillValue", NumberMeta("int32", "Fill value for stripe spacing"), 0)
    def configure(self, context, completed_steps, steps_to_do, part_info,
                  params):
        child = context.block_view(self.params.mri)
        child.fillValue.put_value(params.fillValue)
def _make_time_parts(self, field_name, field_data, writeable):
    description = field_data.description
    if writeable:
        # Writeable fields are parameters edited with a text input
        widget_tag = widget("textinput")
        group_tag = self._make_group("parameters")
    else:
        # Read-only fields are readbacks shown with a text update
        widget_tag = widget("textupdate")
        group_tag = self._make_group("readbacks")
    meta = NumberMeta("float64", description, [group_tag, widget_tag])
    self._make_field_part(field_name, meta, writeable)
    meta = ChoiceMeta(description + " time units", ["s", "ms", "us"],
                      tags=[group_tag, widget("combo")])
    self._make_field_part(field_name + ".UNITS", meta, writeable=True)
def test_to_dict(self):
    s = StringMeta(description="a string")
    meta = MapMeta()
    elements = OrderedDict()
    elements["b"] = s
    elements["c"] = s
    elements["d"] = NumberMeta("int32")
    elements["e"] = s
    meta.set_elements(elements)
    m = Map(meta, {"b": "test", "d": 123, "e": "e"})

    expected = OrderedDict()
    expected["typeid"] = "malcolm:core/Map:1.0"
    expected["b"] = "test"
    expected["d"] = 123
    expected["e"] = "e"
    assert expected == m.to_dict()
def test_to_dict(self):
    nm = NumberMeta("float64", "desc", label="name")
    assert nm.to_dict() == self.serialized
def test_unsigned_validates(self):
    nm = NumberMeta("uint32")
    assert nm.validate("22") == 22
    assert nm.validate(-22) == 2**32 - 22
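# --- Why the wrap-around above holds (a sketch of the underlying numpy
# behaviour the test relies on, assuming NumberMeta validation casts through
# a numpy dtype): unsigned casts wrap modulo 2**32.
import numpy as np

assert np.array(-22).astype(np.uint32) == 2**32 - 22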
def test_none_validates(self):
    nm = NumberMeta("int32")
    assert 0 == nm.validate(None)
from malcolm.core import method_takes, REQUIRED, Importer
from malcolm.modules.builtin.vmetas import StringMeta, NumberMeta


@method_takes(
    "name", StringMeta("The name of the defined parameter"), REQUIRED,
    "value", StringMeta("The value of the defined parameter"), REQUIRED)
def string(params):
    """Define a string parameter to be used within this YAML file"""
    return {params.name: params.value}


@method_takes(
    "name", StringMeta("The name of the defined parameter"), REQUIRED,
    "value", NumberMeta(
        "float64", "The value of the defined parameter"), REQUIRED)
def float64(params):
    """Define a float64 parameter to be used within this YAML file"""
    return {params.name: params.value}


@method_takes(
    "name", StringMeta("The name of the defined parameter"), REQUIRED,
    "value", NumberMeta(
        "int32", "The value of the defined parameter"), REQUIRED)
def int32(params):
    """Define an int32 parameter to be used within this YAML file"""
    return {params.name: params.value}


@method_takes("value", StringMeta("The docstring value"), REQUIRED)
def create_attribute_models(self):
    for data in super(RunnableController, self).create_attribute_models():
        yield data
    # Create sometimes writeable attribute for the current completed scan
    # step
    completed_steps_meta = NumberMeta(
        "int32", "Readback of number of scan steps",
        tags=[widget("textinput")])
    completed_steps_meta.set_writeable_in(ss.PAUSED, ss.ARMED)
    self.completed_steps = completed_steps_meta.create_attribute_model(0)
    yield "completedSteps", self.completed_steps, self.set_completed_steps
    # Create read-only attribute for the number of configured scan steps
    configured_steps_meta = NumberMeta(
        "int32", "Number of steps currently configured",
        tags=[widget("textupdate")])
    self.configured_steps = configured_steps_meta.create_attribute_model(0)
    yield "configuredSteps", self.configured_steps, None
    # Create read-only attribute for the total number of scan steps
    total_steps_meta = NumberMeta(
        "int32", "Readback of number of scan steps",
        tags=[widget("textupdate")])
    self.total_steps = total_steps_meta.create_attribute_model(0)
    yield "totalSteps", self.total_steps, None
    # Create sometimes writeable attribute for the default axis names
    axes_to_move_meta = StringArrayMeta(
        "Default axis names to scan for configure()",
        tags=[widget("table"), config()])
    axes_to_move_meta.set_writeable_in(ss.READY)
    self.axes_to_move = axes_to_move_meta.create_attribute_model(
        self.params.axesToMove)
    yield "axesToMove", self.axes_to_move, self.set_axes_to_move
import h5py as h5
from vdsgen.subframevdsgenerator import SubFrameVDSGenerator

from malcolm.modules.scanning.controllers import RunnableController
from malcolm.core import method_takes, REQUIRED, Part
from malcolm.modules.ADCore.infos import DatasetProducedInfo
from malcolm.modules.builtin.vmetas import StringMeta, NumberMeta
from malcolm.modules.scanpointgenerator.vmetas import PointGeneratorMeta

# Number of points to look ahead of the current id index to account for
# dropped frames
NUM_LOOKAHEAD = 100


@method_takes("name", StringMeta("Name of part"), REQUIRED,
              "dataType", StringMeta("Data type of dataset"), REQUIRED,
              "stripeHeight", NumberMeta("int16", "Height of stripes"),
              REQUIRED,
              "stripeWidth", NumberMeta("int16", "Width of stripes"),
              REQUIRED)
class VDSWrapperPart(Part):
    # Constants for class
    RAW_FILE_TEMPLATE = "FEM{}"
    OUTPUT_FILE = "EXCALIBUR"
    CREATE = "w"
    APPEND = "a"
    READ = "r"
    ID = "/entry/NDAttributes/NDArrayUniqueId"
    SUM = "/entry/sum/sum"
    required_nodes = ["/entry/detector", "/entry/sum", "/entry/NDAttributes"]
    set_bases = ["/entry/detector", "/entry/sum"]
    default_node_tree = [
        "/entry/detector/axes", "/entry/detector/signal",
        "/entry/sum/axes", "/entry/sum/signal"
    ]
class VDSWrapperPart(Part):
    # Constants for class
    RAW_FILE_TEMPLATE = "FEM{}"
    OUTPUT_FILE = "EXCALIBUR"
    CREATE = "w"
    APPEND = "a"
    READ = "r"
    ID = "/entry/NDAttributes/NDArrayUniqueId"
    SUM = "/entry/sum/sum"
    required_nodes = ["/entry/detector", "/entry/sum", "/entry/NDAttributes"]
    set_bases = ["/entry/detector", "/entry/sum"]
    default_node_tree = [
        "/entry/detector/axes", "/entry/detector/signal",
        "/entry/sum/axes", "/entry/sum/signal"
    ]

    def __init__(self, params):
        self.params = params
        super(VDSWrapperPart, self).__init__(params.name)
        self.current_id = None
        self.done_when_reaches = None
        self.generator = None
        self.fems = [1, 2, 3, 4, 5, 6]
        self.vds_path = ""
        self.vds = None
        self.command = []
        self.raw_paths = []
        self.raw_datasets = []
        self.data_type = params.dataType
        self.stripe_height = params.stripeHeight
        self.stripe_width = params.stripeWidth

    @RunnableController.Abort
    @RunnableController.Reset
    @RunnableController.PostRunReady
    def abort(self, context):
        self.close_files()

    def close_files(self):
        for file_ in self.raw_datasets + [self.vds]:
            if file_ is not None and file_.id.valid:
                self.log.info("Closing file %s", file_)
                file_.close()
        self.raw_datasets = []
        self.vds = None

    def _create_dataset_infos(self, generator, filename):
        uniqueid_path = "/entry/NDAttributes/NDArrayUniqueId"
        data_path = "/entry/detector/detector"
        sum_path = "/entry/sum/sum"
        generator_rank = len(generator.axes)

        # Create the main detector data
        yield DatasetProducedInfo(name="EXCALIBUR.data",
                                  filename=filename,
                                  type="primary",
                                  rank=2 + generator_rank,
                                  path=data_path,
                                  uniqueid=uniqueid_path)

        # # And the sum
        # yield DatasetProducedInfo(
        #     name="EXCALIBUR.sum",
        #     filename=filename,
        #     type="secondary",
        #     rank=2 + generator_rank,
        #     path=sum_path,
        #     uniqueid=uniqueid_path)

        # Add any setpoint dimensions
        for axis in generator.axes:
            yield DatasetProducedInfo(name="%s.value_set" % axis,
                                      filename=filename,
                                      type="position_set", rank=1,
                                      path="/entry/detector/%s_set" % axis,
                                      uniqueid="")

    @RunnableController.Configure
    @method_takes(
        "generator", PointGeneratorMeta("Generator instance"), REQUIRED,
        "fileDir", StringMeta("File dir to write HDF files into"), REQUIRED,
        "fileTemplate", StringMeta(
            """Printf style template to generate filename relative to fileDir.
Arguments are: 1) %s: EXCALIBUR"""), "%s.h5",
        "fillValue", NumberMeta("int32", "Fill value for stripe spacing"), 0)
    def configure(self, context, completed_steps, steps_to_do, part_info,
                  params):
        self.generator = params.generator
        self.current_id = completed_steps
        self.done_when_reaches = completed_steps + steps_to_do
        self.vds_path = os.path.join(params.fileDir,
                                     params.fileTemplate % self.OUTPUT_FILE)
        raw_file_path = params.fileTemplate % self.RAW_FILE_TEMPLATE.format(1)
        node_tree = list(self.default_node_tree)
        for axis in params.generator.axes:
            for base in self.set_bases:
                node_tree.append(base + "/{}_set".format(axis))
                node_tree.append(base + "/{}_set_indices".format(axis))

        with h5.File(self.vds_path, self.CREATE, libver="latest") as self.vds:
            for node in self.required_nodes:
                self.vds.require_group(node)
            for node in node_tree:
                self.vds[node] = h5.ExternalLink(raw_file_path, node)

            # Create placeholder id and sum datasets
            initial_dims = tuple([1 for _ in params.generator.shape])
            initial_shape = initial_dims + (1, 1)
            max_shape = params.generator.shape + (1, 1)
            self.vds.create_dataset(self.ID, initial_shape,
                                    maxshape=max_shape, dtype="int32")
            self.vds.create_dataset(self.SUM, initial_shape,
                                    maxshape=max_shape, dtype="float64",
                                    fillvalue=np.nan)

        files = [params.fileTemplate % self.RAW_FILE_TEMPLATE.format(fem)
                 for fem in self.fems]
        shape = params.generator.shape + (self.stripe_height,
                                          self.stripe_width)

        # Create the VDS using vdsgen
        fgen = SubFrameVDSGenerator(
            params.fileDir,
            prefix=None,
            files=files,
            output=params.fileTemplate % self.OUTPUT_FILE,
            source=dict(shape=shape, dtype=self.data_type),
            source_node="/entry/detector/detector",
            target_node="/entry/detector/detector",
            stripe_spacing=0,
            module_spacing=121,
            fill_value=params.fillValue,
            log_level=1  # DEBUG
        )
        fgen.generate_vds()

        # Store required attributes
        self.raw_paths = [os.path.abspath(os.path.join(params.fileDir, file_))
                          for file_ in files]

        # Open the VDS
        self.vds = h5.File(self.vds_path, self.APPEND,
                           libver="latest", swmr=True)
        # Return the dataset information
        dataset_infos = list(self._create_dataset_infos(
            params.generator, params.fileTemplate % self.OUTPUT_FILE))
        return dataset_infos

    # @RunnableController.PostRunArmed
    # @RunnableController.Seek
    # def seek(self, context, completed_steps, steps_to_do, part_info):
    #     self.current_id = completed_steps
    #     self.done_when_reaches = completed_steps + steps_to_do
    #
    # @RunnableController.Run
    # @RunnableController.Resume
    # def run(self, context, update_completed_steps):
    #     self.log.info("VDS part running")
    #     if not self.raw_datasets:
    #         for path_ in self.raw_paths:
    #             self.log.info("Waiting for file %s to be created", path_)
    #             while not os.path.exists(path_):
    #                 context.sleep(1)
    #             self.raw_datasets.append(
    #                 h5.File(path_, self.READ, libver="latest", swmr=True))
    #         for dataset in self.raw_datasets:
    #             self.log.info("Waiting for id in file %s", dataset)
    #             while self.ID not in dataset:
    #                 context.sleep(0.1)
    #     # here I should grab the handles to the vds dataset, id and all
    #     # the swmr datasets and ids
    #     if self.vds.id.valid and self.ID in self.vds:
    #         self.vds.swmr_mode = True
    #         self.vds_sum = self.vds[self.SUM]
    #         self.vds_id = self.vds[self.ID]
    #         self.fems_sum = [ix[self.SUM] for ix in self.raw_datasets]
    #         self.fems_id = [ix[self.ID] for ix in self.raw_datasets]
    #     else:
    #         self.log.warning("File %s does not exist or does not have a "
    #                          "UniqueIDArray, returning 0", file_)
    #         return 0
    #
    #     self.previous_idx = 0
    #     # does this on every run
    #     try:
    #         self.log.info("Monitoring raw files until ID reaches %s",
    #                       self.done_when_reaches)
    #         while self.current_id < self.done_when_reaches:
    #             # monitor the output of the vds id. When it counts up then
    #             # we have finished.
    #             context.sleep(0.1)  # Allow while loop to be aborted
    #             self.maybe_update_datasets()
    #     except Exception as error:
    #         self.log.exception("Error in run. Message:\n%s", error.message)
    #         self.close_files()
    #         raise

    def maybe_update_datasets(self):
        id_shapes = []
        sum_shapes = []
        # First update the id datasets and store their shapes
        for id in self.fems_id:
            id.refresh()
            id_shapes.append(np.array(id.shape))
        # Only refresh once; this should move to resize_vds when we are
        # guaranteed id updates AFTER sum
        for s in self.fems_sum:
            s.refresh()
            sum_shapes.append(np.array(s.shape))
        # Now iterate through the indexes, updating ids and sums if needed
        # TODO: This doesn't seem to actually iterate - just does the last one
        indexes = self.get_indexes_to_check()
        self.log.info("VDS: Indexes to check: %s", indexes)
        need_updates = True
        for index in indexes:
            # For some reason, at certain points all the ids come back as
            # zeroes
            for i, id in enumerate(self.fems_id):
                if not self.index_in_range(index, id_shapes[i]):
                    self.log.info("VDS: ID Index out of range: %s", index)
                    return
                elif not self.index_in_range(index, sum_shapes[i]):
                    self.log.info("VDS: SUM Index out of range: %s", index)
                    return
                else:
                    fem_id = id[index]
                    if fem_id == 0:
                        self.log.info("VDS: FEM%d not written data %s yet",
                                      i + 1, index)
                        return
                    else:
                        assert fem_id == self.current_id + 1, \
                            "VDS: FEM%d wrote %d in index %s when " \
                            "expecting %s" % (
                                i + 1, fem_id, index, self.current_id + 1)
            if need_updates:
                self.resize_vds(id_shapes[0])
                need_updates = False
            self.update_id_sum(index)
            self.current_id += 1
        self.flush_id_sum()

    def resize_vds(self, shape):
        # Source and target are now the same shape
        self.vds_sum.resize(shape)
        self.vds_id.resize(shape)

    def update_id_sum(self, index):
        self.vds_sum[index] = sum(s[index] for s in self.fems_sum)
        self.vds_id[index] = self.current_id + 1

    def flush_id_sum(self):
        # Flush to disc
        self.vds_sum.flush()
        self.vds_id.flush()

    def index_in_range(self, index, shape):
        # Check the given index is valid for the shape of the array
        in_range = index < np.array(shape)[:len(index)]
        return np.all(in_range)

    def get_indexes_to_check(self):
        # Yields the indexes that we should check for updates
        for idx in range(self.current_id, self.done_when_reaches):
            index = tuple(self.generator.get_point(idx).indexes)
            yield index
def test_init(self):
    nm = NumberMeta("float32")
    assert nm.typeid == "malcolm:core/NumberMeta:1.0"
    assert nm.dtype == "float32"
    assert nm.label == ""
def create_attribute_models(self):
    # Create writeable attribute for current counter value
    meta = NumberMeta("float64", "A counter", tags=[config()])
    self.counter = meta.create_attribute_model()
    yield "counter", self.counter, self.counter.set_value
from tornado.ioloop import IOLoop
from tornado.websocket import websocket_connect

from malcolm.modules.builtin.controllers import ClientComms
from malcolm.core import Subscribe, deserialize_object, method_also_takes, \
    json_decode, json_encode, Response, Error, Unsubscribe, Update, Return, \
    Queue, TimeoutError
from malcolm.modules.builtin.vmetas import StringMeta, NumberMeta, \
    StringArrayMeta
from malcolm.tags import widget


@method_also_takes(
    "hostname", StringMeta("Hostname of malcolm websocket server"),
    "localhost",
    "port", NumberMeta("int32", "Port number to run up under"), 8080,
    "connectTimeout", NumberMeta("float64", "Time to wait for connection"),
    5.0)
class WebsocketClientComms(ClientComms):
    """A class for a client to communicate with the server"""
    use_cothread = False
    # Attribute
    remote_blocks = None

    loop = None
    _conn = None
    _spawned = None
    _connected_queue = None
    # {new_id: (request, old_id)}
    _request_lookup = None
    # {Subscribe.generator_key(): Subscribe}
def test_float_against_float32(self):
    nm = NumberMeta("float32")
    self.assertAlmostEqual(123.456, nm.validate(123.456), places=5)
"name", StringMeta( "Specify that this class will take a parameter name"), REQUIRED, "description", StringMeta( "Description of this parameter"), REQUIRED, "default", StringMeta(default_desc), OPTIONAL) def string(params): """Add a string parameter to be passed when instantiating this YAML file""" return args_for_takes(params, StringMeta) @method_takes( "name", StringMeta( "Specify that this class will take a parameter name"), REQUIRED, "description", StringMeta( "Description of this parameter"), REQUIRED, "default", NumberMeta("float64", default_desc), OPTIONAL) def float64(params): """Add a float64 parameter to be passed when instantiating this YAML file""" return args_for_takes(params, NumberMeta, "float64") @method_takes( "name", StringMeta( "Specify that this class will take a parameter name"), REQUIRED, "description", StringMeta( "Description of this parameter"), REQUIRED, "default", NumberMeta("int32", default_desc), OPTIONAL) def int32(params): """Add an int32 parameter to be passed when instantiating this YAML file""" return args_for_takes(params, NumberMeta, "int32")
def test_float_against_float64(self):
    nm = NumberMeta("float64")
    assert 123.456 == nm.validate(123.456)
class VDSWrapperPart(Part):
    # Constants for class
    RAW_FILE_TEMPLATE = "FEM{}"
    OUTPUT_FILE = "EXCALIBUR"
    CREATE = "w"
    APPEND = "a"
    READ = "r"
    ID = "/entry/NDAttributes/NDArrayUniqueId"
    SUM = "/entry/sum/sum"
    required_nodes = ["/entry/detector", "/entry/sum", "/entry/NDAttributes"]
    set_bases = ["/entry/detector", "/entry/sum"]
    default_node_tree = [
        "/entry/detector/axes", "/entry/detector/signal",
        "/entry/sum/axes", "/entry/sum/signal"
    ]

    def __init__(self, params):
        self.params = params
        super(VDSWrapperPart, self).__init__(params.name)
        self.current_idx = None
        self.done_when_reaches = None
        self.generator = None
        self.fems = [1, 2, 3, 4, 5, 6]
        self.vds_path = ""
        self.vds = None
        self.command = []
        self.raw_paths = []
        self.raw_datasets = []
        self.data_type = params.dataType
        self.stripe_height = params.stripeHeight
        self.stripe_width = params.stripeWidth

    @RunnableController.Abort
    @RunnableController.Reset
    @RunnableController.PostRunReady
    def abort(self, context):
        self.close_files()

    def close_files(self):
        for file_ in self.raw_datasets + [self.vds]:
            if file_ is not None and file_.id.valid:
                self.log.info("Closing file %s", file_)
                file_.close()
        self.raw_datasets = []
        self.vds = None

    def _create_dataset_infos(self, generator, filename):
        uniqueid_path = "/entry/NDAttributes/NDArrayUniqueId"
        data_path = "/entry/detector/detector"
        sum_path = "/entry/sum/sum"
        generator_rank = len(generator.axes)

        # Create the main detector data
        yield DatasetProducedInfo(name="EXCALIBUR.data",
                                  filename=filename,
                                  type="primary",
                                  rank=2 + generator_rank,
                                  path=data_path,
                                  uniqueid=uniqueid_path)

        # And the sum
        yield DatasetProducedInfo(name="EXCALIBUR.sum",
                                  filename=filename,
                                  type="secondary",
                                  rank=2 + generator_rank,
                                  path=sum_path,
                                  uniqueid=uniqueid_path)

        # Add any setpoint dimensions
        for axis in generator.axes:
            yield DatasetProducedInfo(name="%s.value_set" % axis,
                                      filename=filename,
                                      type="position_set", rank=1,
                                      path="/entry/detector/%s_set" % axis,
                                      uniqueid="")

    @RunnableController.Configure
    @method_takes(
        "generator", PointGeneratorMeta("Generator instance"), REQUIRED,
        "fileDir", StringMeta("File dir to write HDF files into"), REQUIRED,
        "fileTemplate", StringMeta(
            """Printf style template to generate filename relative to fileDir.
Arguments are: 1) %s: EXCALIBUR"""), "%s.h5",
        "fillValue", NumberMeta("int32", "Fill value for stripe spacing"), 0)
    def configure(self, context, completed_steps, steps_to_do, part_info,
                  params):
        self.generator = params.generator
        self.current_idx = completed_steps
        self.done_when_reaches = completed_steps + steps_to_do
        self.vds_path = os.path.join(params.fileDir,
                                     params.fileTemplate % self.OUTPUT_FILE)
        raw_file_path = params.fileTemplate % self.RAW_FILE_TEMPLATE.format(1)
        node_tree = list(self.default_node_tree)
        for axis in params.generator.axes:
            for base in self.set_bases:
                node_tree.append(base + "/{}_set".format(axis))
                node_tree.append(base + "/{}_set_indices".format(axis))

        with h5.File(self.vds_path, self.CREATE, libver="latest") as self.vds:
            for node in self.required_nodes:
                self.vds.require_group(node)
            for node in node_tree:
                self.vds[node] = h5.ExternalLink(raw_file_path, node)

            # Create placeholder id and sum datasets
            initial_dims = tuple([1 for _ in params.generator.shape])
            initial_shape = initial_dims + (1, 1)
            max_shape = params.generator.shape + (1, 1)
            self.vds.create_dataset(self.ID, initial_shape,
                                    maxshape=max_shape, dtype="int32")
            self.vds.create_dataset(self.SUM, initial_shape,
                                    maxshape=max_shape, dtype="float64",
                                    fillvalue=np.nan)

        files = [params.fileTemplate % self.RAW_FILE_TEMPLATE.format(fem)
                 for fem in self.fems]
        shape = params.generator.shape + (self.stripe_height,
                                          self.stripe_width)

        # Create the VDS using vdsgen
        fgen = SubFrameVDSGenerator(
            params.fileDir,
            prefix=None,
            files=files,
            output=params.fileTemplate % self.OUTPUT_FILE,
            source=dict(shape=shape, dtype=self.data_type),
            source_node="/entry/detector/detector",
            target_node="/entry/detector/detector",
            stripe_spacing=0,
            module_spacing=121,
            fill_value=params.fillValue,
            log_level=1  # DEBUG
        )
        fgen.generate_vds()

        # Store required attributes
        self.raw_paths = [os.path.abspath(os.path.join(params.fileDir, file_))
                          for file_ in files]

        # Open the VDS
        self.vds = h5.File(self.vds_path, self.APPEND,
                           libver="latest", swmr=True)
        # Return the dataset information
        dataset_infos = list(self._create_dataset_infos(
            params.generator, params.fileTemplate % self.OUTPUT_FILE))
        return dataset_infos

    @RunnableController.PostRunArmed
    @RunnableController.Seek
    def seek(self, context, completed_steps, steps_to_do, part_info):
        self.current_idx = completed_steps
        self.done_when_reaches = completed_steps + steps_to_do

    @RunnableController.Run
    @RunnableController.Resume
    def run(self, context, update_completed_steps):
        if not self.raw_datasets:
            for path_ in self.raw_paths:
                self.log.info("Waiting for file %s to be created", path_)
                while not os.path.exists(path_):
                    context.sleep(1)
                self.raw_datasets.append(
                    h5.File(path_, self.READ, libver="latest", swmr=True))
            for dataset in self.raw_datasets:
                self.log.info("Waiting for id in file %s", dataset)
                while self.ID not in dataset:
                    context.sleep(0.1)
        # Grab the handles to the vds dataset, id and all the swmr datasets
        # and ids
        if self.vds.id.valid and self.ID in self.vds:
            self.vds.swmr_mode = True
            self.vds_sum = self.vds[self.SUM]
            self.vds_id = self.vds[self.ID]
            self.fems_sum = [ix[self.SUM] for ix in self.raw_datasets]
            self.fems_id = [ix[self.ID] for ix in self.raw_datasets]
        else:
            self.log.warning("File %s does not exist or does not have a "
                             "UniqueIDArray, returning 0", self.vds_path)
            return 0

        self.previous_idx = 0
        # This runs on every run() call
        try:
            self.log.info("Monitoring raw files until ID reaches %s",
                          self.done_when_reaches)
            while self.current_idx < self.done_when_reaches:
                # Monitor the output of the vds id.
                # When it counts up then we have finished
                context.sleep(0.1)  # Allow while loop to be aborted
                indexes = self.get_modify_slices()
                self.maybe_update_datasets(indexes)
        except Exception as error:
            self.log.exception("Error in run. Message:\n%s", error.message)
            self.close_files()
            raise

    def maybe_update_datasets(self, indexes):
        ids = []
        for id in self.fems_id:
            id.refresh()
            shape = id.shape
            # Only select the ones in range
            if isinstance(indexes, tuple):
                # One index, unpacked
                if self.index_in_range(indexes, shape):
                    valid_indexes = indexes
                else:
                    valid_indexes = None
            else:
                valid_indexes = tuple(i for i in indexes
                                      if self.index_in_range(i, shape))
            if valid_indexes:
                ids.append(max(id[valid_indexes]))
            else:
                # Not ready yet, don't process
                return
        if min(ids) > self.current_idx:
            # The fem with the lowest id has moved past the vds id
            self.update_sum(indexes)  # update the sum index
            self.update_id(indexes)  # update the id index
            self.log.info("ID reached: %s", self.current_idx)

    def index_in_range(self, index, shape):
        # Check the given index is valid for the shape of the array
        in_range = index < np.array(shape)[:len(index)]
        return np.all(in_range)

    def update_id(self, indexes):
        # Source and target are now the same shape
        self.vds_id.resize(self.fems_id[0].shape)
        new_ids = self.fems_id[0][indexes]
        for id in self.fems_id[1:]:
            new_ids = np.minimum(new_ids, id[indexes])
        # Set the updated values
        self.vds_id[indexes] = new_ids
        self.current_idx = max(new_ids)
        # Flush to disc
        self.vds_id.flush()

    def update_sum(self, indexes):
        self.fems_sum[0].refresh()
        # Get the shape that we have gotten to
        new_shape = self.fems_sum[0].shape
        self.vds_sum.refresh()
        # Source and target are now the same size
        self.vds_sum.resize(new_shape)
        fems_sum = self.fems_sum[0][indexes]
        for fem in self.fems_sum[1:]:
            fem.refresh()
            fems_sum += fem[indexes]
        self.vds_sum[indexes] = fems_sum
        self.vds_sum.flush()

    def get_modify_slices(self):
        # Returns the slices we want to modify
        indexes = []
        end_idx = min(self.current_idx + NUM_LOOKAHEAD, self.done_when_reaches)
        for idx in range(self.current_idx, end_idx):
            indexes.append(self.generator.get_point(idx).indexes)
        if len(indexes) == 1:
            # if indexes = [[0, 4]], return something like (0, 4)
            return tuple(indexes[0])
        else:
            # if indexes = [[0, 4], [0, 5]], return something like
            # [(0, 0), (4, 5)]
            return zip(*indexes)
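# --- Why get_modify_slices transposes with zip (an illustrative sketch, not
# from the source): numpy-style coordinate indexing takes one sequence per
# dimension, so per-point indexes must go from point-major to dimension-major.
import numpy as np

indexes = [[0, 4], [0, 5]]  # two scan points, each an (outer, inner) index
transposed = list(zip(*indexes))
assert transposed == [(0, 0), (4, 5)]
a = np.arange(36).reshape(6, 6)
# One coordinate sequence per dimension selects both points in one access:
assert list(a[tuple(transposed)]) == [a[0, 4], a[0, 5]]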
def test_int_against_float(self):
    nm = NumberMeta("float64")
    assert 123 == nm.validate(123)
class RunnableController(ManagerController):
    """RunnableDevice implementer that also exposes GUI for child parts"""
    # The stateSet that this controller implements
    stateSet = ss()

    Validate = Hook()
    """Called at validate() to check parameters are valid

    Args:
        context (Context): The context that should be used to perform
            operations on child blocks
        part_info (dict): {part_name: [Info]} returned from ReportStatus
        params (Map): Any configuration parameters asked for by part
            validate() method_takes() decorator

    Returns:
        [`ParameterTweakInfo`] - any parameter tweaks that have occurred
            to make them compatible with this part. If any are returned,
            Validate will be re-run with the modified parameters.
    """

    ReportStatus = Hook()
    """Called before Validate, Configure, PostRunArmed and Seek hooks to
    report the current configuration of all parts

    Args:
        context (Context): The context that should be used to perform
            operations on child blocks

    Returns:
        [`Info`] - any configuration Info objects relevant to other parts
    """

    Configure = Hook()
    """Called at configure() to configure child block for a run

    Args:
        context (Context): The context that should be used to perform
            operations on child blocks
        completed_steps (int): Number of steps already completed
        steps_to_do (int): Number of steps we should configure for
        part_info (dict): {part_name: [Info]} returned from ReportStatus
        params (Map): Any configuration parameters asked for by part
            configure() method_takes() decorator

    Returns:
        [`Info`] - any Info objects that need to be passed to other parts
            for storing in attributes
    """

    PostConfigure = Hook()
    """Called at the end of configure() to store configuration info
    calculated in the Configure hook

    Args:
        context (Context): The context that should be used to perform
            operations on child blocks
        part_info (dict): {part_name: [Info]} returned from Configure hook
    """

    Run = Hook()
    """Called at run() to start the configured steps running

    Args:
        context (Context): The context that should be used to perform
            operations on child blocks
        update_completed_steps (callable): If part can report progress,
            this part should call update_completed_steps(completed_steps,
            self) with the integer step value each time progress is updated
    """

    PostRunArmed = Hook()
    """Called at the end of run() when there are more steps to be run

    Args:
        context (Context): The context that should be used to perform
            operations on child blocks
        completed_steps (int): Number of steps already completed
        steps_to_do (int): Number of steps we should configure for
        part_info (dict): {part_name: [Info]} returned from ReportStatus
        params (Map): Any configuration parameters asked for by part
            configure() method_takes() decorator
    """

    PostRunReady = Hook()
    """Called at the end of run() when there are no more steps to be run

    Args:
        context (Context): The context that should be used to perform
            operations on child blocks
    """

    Pause = Hook()
    """Called at pause() to pause the current scan before Seek is called

    Args:
        context (Context): The context that should be used to perform
            operations on child blocks
    """

    Seek = Hook()
    """Called at seek() or at the end of pause() to reconfigure for a
    different number of completed_steps

    Args:
        context (Context): The context that should be used to perform
            operations on child blocks
        completed_steps (int): Number of steps already completed
        steps_to_do (int): Number of steps we should configure for
        part_info (dict): {part_name: [Info]} returned from ReportStatus
        params (Map): Any configuration parameters asked for by part
            configure() method_takes() decorator
    """
    Resume = Hook()
    """Called at resume() to continue a paused scan

    Args:
        context (Context): The context that should be used to perform
            operations on child blocks
        update_completed_steps (callable): If part can report progress,
            this part should call update_completed_steps(completed_steps,
            self) with the integer step value each time progress is updated
    """

    Abort = Hook()
    """Called at abort() to stop the current scan

    Args:
        context (Context): The context that should be used to perform
            operations on child blocks
    """

    # Attributes
    completed_steps = None
    configured_steps = None
    total_steps = None
    axes_to_move = None

    # Params passed to configure()
    configure_params = None

    # Shared contexts between Configure, Run, Pause, Seek, Resume
    part_contexts = None

    # Configure method_models
    # {part: configure_method_model}
    configure_method_models = None

    # Stored for pause
    steps_per_run = 0

    # Progress reporting dict
    # {part: completed_steps for that part}
    progress_updates = None

    # Queue so that do_run can wait to see why it was aborted and resume if
    # needed
    resume_queue = None

    # Queue so we can wait for aborts to complete
    abort_queue = None

    @method_writeable_in(ss.FAULT, ss.DISABLED, ss.ABORTED, ss.ARMED)
    def reset(self):
        # Override reset to work from aborted too
        super(RunnableController, self).reset()

    def create_attribute_models(self):
        for data in super(RunnableController, self).create_attribute_models():
            yield data
        # Create sometimes writeable attribute for the current completed scan
        # step
        completed_steps_meta = NumberMeta(
            "int32", "Readback of number of scan steps",
            tags=[widget("textinput")])
        completed_steps_meta.set_writeable_in(ss.PAUSED, ss.ARMED)
        self.completed_steps = completed_steps_meta.create_attribute_model(0)
        yield "completedSteps", self.completed_steps, self.set_completed_steps
        # Create read-only attribute for the number of configured scan steps
        configured_steps_meta = NumberMeta(
            "int32", "Number of steps currently configured",
            tags=[widget("textupdate")])
        self.configured_steps = \
            configured_steps_meta.create_attribute_model(0)
        yield "configuredSteps", self.configured_steps, None
        # Create read-only attribute for the total number of scan steps
        total_steps_meta = NumberMeta(
            "int32", "Readback of number of scan steps",
            tags=[widget("textupdate")])
        self.total_steps = total_steps_meta.create_attribute_model(0)
        yield "totalSteps", self.total_steps, None
        # Create sometimes writeable attribute for the default axis names
        axes_to_move_meta = StringArrayMeta(
            "Default axis names to scan for configure()",
            tags=[widget("table"), config()])
        axes_to_move_meta.set_writeable_in(ss.READY)
        self.axes_to_move = axes_to_move_meta.create_attribute_model(
            self.params.axesToMove)
        yield "axesToMove", self.axes_to_move, self.set_axes_to_move

    def do_init(self):
        self.part_contexts = {}
        # Populate configure args from any child method hooked to Configure.
        # If we have RunnableChildParts, they will call update_configure_args
        # during do_init
        self.configure_method_models = {}
        # Look for all parts that hook into Configure
        for part, func_name in \
                self._hooked_func_names[self.Configure].items():
            if func_name in part.method_models:
                self.update_configure_args(
                    part, part.method_models[func_name])
        super(RunnableController, self).do_init()

    def do_reset(self):
        super(RunnableController, self).do_reset()
        self.configured_steps.set_value(0)
        self.completed_steps.set_value(0)
        self.total_steps.set_value(0)

    def update_configure_args(self, part, configure_model):
        """Tell controller that part needs different things passed to
        Configure"""
        with self.changes_squashed:
            # Update the dict
            self.configure_method_models[part] = configure_model
            method_models = list(self.configure_method_models.values())

            # Update takes with the things we need
            default_configure = MethodModel.from_dict(
                RunnableController.configure.MethodModel.to_dict())
            default_configure.defaults["axesToMove"] = self.axes_to_move.value
            method_models.append(default_configure)

            # Decorate validate and configure with the sum of its parts
            self._block.validate.recreate_from_others(method_models)
            self._block.validate.set_returns(self._block.validate.takes)
            self._block.configure.recreate_from_others(method_models)

    def set_axes_to_move(self, value):
        self.axes_to_move.set_value(value)

    @method_takes(*configure_args)
    @method_returns(*validate_args)
    def validate(self, params, returns):
        """Validate configuration parameters and return validated parameters.

        Doesn't take device state into account so can be run in any state
        """
        iterations = 10
        # Make some tasks just for validate
        part_contexts = self.create_part_contexts()
        # Get any status from all parts
        status_part_info = self.run_hook(self.ReportStatus, part_contexts)
        while iterations > 0:
            # Try up to 10 times to get a valid set of parameters
            iterations -= 1
            # Validate the params with all the parts
            validate_part_info = self.run_hook(
                self.Validate, part_contexts, status_part_info, **params)
            tweaks = ParameterTweakInfo.filter_values(validate_part_info)
            if tweaks:
                for tweak in tweaks:
                    params[tweak.parameter] = tweak.value
                    self.log.debug(
                        "Tweaking %s to %s", tweak.parameter, tweak.value)
            else:
                # Consistent set, just return the params
                return params
        raise ValueError("Could not get a consistent set of parameters")

    def abortable_transition(self, state):
        with self._lock:
            # We might have been aborted just now, so this will fail
            # with an AbortedError if we were
            self.part_contexts[self].sleep(0)
            self.transition(state)

    @method_takes(*configure_args)
    @method_writeable_in(ss.READY)
    def configure(self, params):
        """Validate the params then configure the device ready for run().

        Try to prepare the device as much as possible so that run() is quick
        to start, this may involve potentially long running activities like
        moving motors.

        Normally it will return in Armed state. If the user aborts then it
        will return in Aborted state. If something goes wrong it will return
        in Fault state. If the user disables then it will return in Disabled
        state.
""" self.validate(params, params) try: self.transition(ss.CONFIGURING) self.do_configure(params) self.abortable_transition(ss.ARMED) except AbortedError: self.abort_queue.put(None) raise except Exception as e: self.go_to_error_state(e) raise def do_configure(self, params): # These are the part tasks that abort() and pause() will operate on self.part_contexts = self.create_part_contexts() # Tell these contexts to notify their parts that about things they # modify so it doesn't screw up the modified led for part, context in self.part_contexts.items(): context.set_notify_dispatch_request(part.notify_dispatch_request) # So add one for ourself too so we can be aborted self.part_contexts[self] = Context(self.process) # Store the params for use in seek() self.configure_params = params # This will calculate what we need from the generator, possibly a long # call params.generator.prepare() # Set the steps attributes that we will do across many run() calls self.total_steps.set_value(params.generator.size) self.completed_steps.set_value(0) self.configured_steps.set_value(0) # TODO: We can be cleverer about this and support a different number # of steps per run for each run by examining the generator structure self.steps_per_run = self._get_steps_per_run( params.generator, params.axesToMove) # Get any status from all parts part_info = self.run_hook(self.ReportStatus, self.part_contexts) # Run the configure command on all parts, passing them info from # ReportStatus. Parts should return any reporting info for PostConfigure completed_steps = 0 steps_to_do = self.steps_per_run part_info = self.run_hook( self.Configure, self.part_contexts, completed_steps, steps_to_do, part_info, **self.configure_params) # Take configuration info and reflect it as attribute updates self.run_hook(self.PostConfigure, self.part_contexts, part_info) # Update the completed and configured steps self.configured_steps.set_value(steps_to_do) # Reset the progress of all child parts self.progress_updates = {} self.resume_queue = Queue() def _get_steps_per_run(self, generator, axes_to_move): steps = 1 axes_set = set(axes_to_move) for dim in reversed(generator.dimensions): # If the axes_set is empty then we are done if not axes_set: break # Consume the axes that this generator scans for axis in dim.axes: assert axis in axes_set, \ "Axis %s is not in %s" % (axis, axes_to_move) axes_set.remove(axis) # Now multiply by the dimensions to get the number of steps steps *= dim.size return steps @method_writeable_in(ss.ARMED) def run(self): """Run a device where configure() has already be called Normally it will return in Ready state. If setup for multiple-runs with a single configure() then it will return in Armed state. If the user aborts then it will return in Aborted state. If something goes wrong it will return in Fault state. If the user disables then it will return in Disabled state. 
""" if self.configured_steps.value < self.total_steps.value: next_state = ss.ARMED else: next_state = ss.READY try: self.transition(ss.RUNNING) hook = self.Run going = True while going: try: self.do_run(hook) except AbortedError: self.abort_queue.put(None) # Wait for a response on the resume_queue should_resume = self.resume_queue.get() if should_resume: # we need to resume hook = self.Resume self.log.debug("Resuming run") else: # we don't need to resume, just drop out raise else: going = False self.abortable_transition(next_state) except AbortedError: raise except Exception as e: self.go_to_error_state(e) raise def do_run(self, hook): self.run_hook(hook, self.part_contexts, self.update_completed_steps) self.abortable_transition(ss.POSTRUN) completed_steps = self.configured_steps.value if completed_steps < self.total_steps.value: steps_to_do = self.steps_per_run part_info = self.run_hook(self.ReportStatus, self.part_contexts) self.completed_steps.set_value(completed_steps) self.run_hook( self.PostRunArmed, self.part_contexts, completed_steps, steps_to_do, part_info, **self.configure_params) self.configured_steps.set_value(completed_steps + steps_to_do) else: self.run_hook(self.PostRunReady, self.part_contexts) def update_completed_steps(self, completed_steps, part): with self._lock: # Update self.progress_updates[part] = completed_steps min_completed_steps = min(self.progress_updates.values()) if min_completed_steps > self.completed_steps.value: self.completed_steps.set_value(min_completed_steps) @method_writeable_in( ss.READY, ss.CONFIGURING, ss.ARMED, ss.RUNNING, ss.POSTRUN, ss.PAUSED, ss.SEEKING) def abort(self): """Abort the current operation and block until aborted Normally it will return in Aborted state. If something goes wrong it will return in Fault state. If the user disables then it will return in Disabled state. """ # Tell _call_do_run not to resume if self.resume_queue: self.resume_queue.put(False) self.try_aborting_function(ss.ABORTING, ss.ABORTED, self.do_abort) def do_abort(self): self.run_hook(self.Abort, self.create_part_contexts()) def try_aborting_function(self, start_state, end_state, func, *args): try: # To make the running function fail we need to stop any running # contexts (if running a hook) or make transition() fail with # AbortedError. Both of these are accomplished here with self._lock: original_state = self.state.value self.abort_queue = Queue() self.transition(start_state) for context in self.part_contexts.values(): context.stop() if original_state not in (ss.READY, ss.ARMED, ss.PAUSED): # Something was running, let it finish aborting try: self.abort_queue.get(timeout=ABORT_TIMEOUT) except TimeoutError: self.log.warning("Timeout waiting while %s" % start_state) with self._lock: # Now we've waited for a while we can remove the error state # for transition in case a hook triggered it rather than a # transition self.part_contexts[self].ignore_stops_before_now() func(*args) self.abortable_transition(end_state) except AbortedError: self.abort_queue.put(None) raise except Exception as e: # pylint:disable=broad-except self.go_to_error_state(e) raise def set_completed_steps(self, completed_steps): """Seek a Armed or Paused scan back to another value Normally it will return in the state it started in. If the user aborts then it will return in Aborted state. If something goes wrong it will return in Fault state. If the user disables then it will return in Disabled state. 
""" call_with_params(self.pause, completedSteps=completed_steps) @method_writeable_in(ss.ARMED, ss.PAUSED, ss.RUNNING) @method_takes("completedSteps", NumberMeta( "int32", "Step to mark as the last completed step, -1 for current"), -1) def pause(self, params): """Pause a run() so that resume() can be called later. The original call to run() will not be interrupted by pause(), it will with until the scan completes or is aborted. Normally it will return in Paused state. If the user aborts then it will return in Aborted state. If something goes wrong it will return in Fault state. If the user disables then it will return in Disabled state. """ current_state = self.state.value if params.completedSteps < 0: completed_steps = self.completed_steps.value else: completed_steps = params.completedSteps if current_state == ss.RUNNING: next_state = ss.PAUSED else: next_state = current_state assert completed_steps < self.total_steps.value, \ "Cannot seek to after the end of the scan" self.try_aborting_function( ss.SEEKING, next_state, self.do_pause, completed_steps) def do_pause(self, completed_steps): self.run_hook(self.Pause, self.create_part_contexts()) in_run_steps = completed_steps % self.steps_per_run steps_to_do = self.steps_per_run - in_run_steps part_info = self.run_hook(self.ReportStatus, self.part_contexts) self.completed_steps.set_value(completed_steps) self.run_hook( self.Seek, self.part_contexts, completed_steps, steps_to_do, part_info, **self.configure_params) self.configured_steps.set_value(completed_steps + steps_to_do) @method_writeable_in(ss.PAUSED) def resume(self): """Resume a paused scan. Normally it will return in Running state. If something goes wrong it will return in Fault state. """ self.transition(ss.RUNNING) self.resume_queue.put(True) # self.run will now take over def do_disable(self): # Abort anything that is currently running, but don't wait for context in self.part_contexts.values(): context.stop() if self.resume_queue: self.resume_queue.put(False) super(RunnableController, self).do_disable()
def test_int_against_int(self):
    nm = NumberMeta("int32")
    assert 123 == nm.validate(123)
def create_meta(self, description, tags):
    return NumberMeta("int32", description=description, tags=tags)
def test_float_to_int_truncates(self):
    nm = NumberMeta("int32")
    assert nm.validate(123.6) == 123
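# --- The truncation above mirrors numpy's float-to-int cast, which rounds
# toward zero rather than to nearest (a sketch of the behaviour the test
# relies on, assuming validation casts through a numpy dtype):
import numpy as np

assert np.array(123.6).astype(np.int32) == 123
assert np.array(-123.6).astype(np.int32) == -123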