def stage(self):
    # Generate a fresh, unique file-name template for this staging cycle;
    # the '%d' is filled in with the frame index at save time (e.g.
    # f"path/to/files/{uuid.uuid4()}_%d.ext" for a nested layout).
    self._rel_path_template = f"{uuid.uuid4()}_%d.jpg"
    # resource_factory (from ophyd.areadetector.filestore_mixins) returns
    # the Resource document plus a factory that mints Datum documents
    # referencing it.
    resource, self._datum_factory = resource_factory(
        self._SPEC, self._root, self._rel_path_template, {}, "posix")
    self._asset_docs_cache.append(('resource', resource))
    self._counter = itertools.count()
    # super().stage() applies the file-path settings to the device.
    return super().stage()
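The '%d' placeholder and the itertools counter come together when each frame is written. A minimal sketch of the per-trigger bookkeeping (the _save_asset name and the one-file-per-trigger assumption are hypothetical; Path comes from pathlib):

def _save_asset(self, image):
    # Hypothetical helper: mint a Datum for frame i and record where the
    # handler will find the file.
    i = next(self._counter)
    datum = self._datum_factory({'index': i})
    self._asset_docs_cache.append(('datum', datum))
    fpath = Path(self._root) / (self._rel_path_template % i)
    # ... write `image` to fpath (e.g. with imageio) ...
    return datum['datum_id']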
def stage(self):
    self._rel_path_template = f"{uuid.uuid4()}_%d.jpg"
    self._root = self.current_folder()
    resource, self._datum_factory = resource_factory(
        "BEAMLINE_WEBCAM", self._root, self._rel_path_template, {},
        "posix")
    self._asset_docs_cache.append(('resource', resource))
    self._counter = itertools.count()
    # super().stage() applies the file-path settings to the device.
    return super().stage()
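Both variants only cache ('resource', ...) pairs; the RunEngine drains them through a companion collect_asset_docs() method. A minimal sketch, assuming _asset_docs_cache is a plain list:

def collect_asset_docs(self):
    # Yield each cached (name, doc) pair once, then clear the cache so
    # nothing is re-emitted on the next staging cycle.
    items = list(self._asset_docs_cache)
    self._asset_docs_cache.clear()
    yield from items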
    def _generate_resource(self, resource_kwargs):
        # Don't rewrite the "normal" code path (yet): let the base class
        # create the primary image resource first.
        super()._generate_resource(resource_kwargs)
        # Path of the HDF5 file relative to the registry root
        # (PurePath comes from pathlib).
        fn = PurePath(self._fn).relative_to(self.reg_root)

        # Update the shape that describe() will report for the timestamps.
        self.time_stamp.shape = [self.get_frames_per_point()]

        # Register a second resource against the same HDF5 file, using a
        # spec that handlers resolve to the per-frame timestamp dataset.
        resource, self._ts_datum_factory = resource_factory(
            spec="AD_HDF5_TS",
            root=str(self.reg_root),
            resource_path=str(fn),
            resource_kwargs=resource_kwargs,
            path_semantics=self.path_semantics,
        )

        self._ts_resource_uid = resource["uid"]
        self._asset_docs_cache.append(("resource", resource))
Example #4
    def trigger(self):
        # Needs: os, uuid, numpy as np, pathlib.Path, resource_factory.
        # Make sure the destination directory exists before saving.
        os.makedirs('/tmp/demo', exist_ok=True)
        st = super().trigger()
        ret = super().read()
        # Reshape the flat waveform into its true array shape.
        val = ret[self.name]['value'].reshape(self._size_pv.get())

        resource, datum_factory = resource_factory(
            spec='npy',
            root='/tmp',
            resource_path=f'demo/{uuid.uuid4()}.npy',
            resource_kwargs={},
            path_semantics='posix')
        datum = datum_factory({})
        self._asset_docs_cache.append(('resource', resource))
        self._asset_docs_cache.append(('datum', datum))
        # Write the array to disk where the 'npy' handler will find it.
        fpath = Path(resource['root']) / resource['resource_path']
        np.save(fpath, val)

        # Swap the in-memory array for the datum_id; read() hands back this
        # reading, so downstream consumers fetch the data from disk.
        ret[self.name]['value'] = datum['datum_id']
        self._last_ret = ret
        return st
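A trigger() like this is normally paired with read() and describe() overrides, so the datum_id reading is advertised as externally stored. A minimal sketch under those assumptions:

    def read(self):
        # Return the reading captured in trigger(); 'value' is now a
        # datum_id pointing at the .npy file on disk.
        if self._last_ret is None:
            raise RuntimeError('read() called before trigger()')
        return self._last_ret

    def describe(self):
        ret = super().describe()
        # Mark the key as externally stored so consumers resolve it via
        # the resource/datum documents instead of expecting an inline array.
        ret[self.name]['external'] = 'FILESTORE:'
        return ret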
Example #6
    def complete(self):
        """
        Call this when all needed data has been collected. This has no idea
        whether that is true, so it will obligingly stop immediately. It is
        up to the caller to ensure that the motion is actually complete.
        """
        # Our acquisition complete PV is : XF:05IDD-ES:1{Dev:Zebra1}:ARRAY_ACQ
        while self._encoder.pc.data_in_progress.get() == 1:
            ttime.sleep(.1)
            #poll()
        ttime.sleep(.1)
        self._mode = 'complete'
        # self._encoder.pc.arm.put(0)  # sanity check; this should happen automatically
        # this does the same as the above, but also aborts data collection
        self._encoder.pc.block_state_reset.put(1)
        #see triggering errors of the xspress3 on suspension.  This is to test the reset of the xspress3 after a line.
        self._det.settings.acquire.put(0)

        self.__filename = '{}.h5'.format(uuid.uuid4())
        self.__filename_sis = '{}.h5'.format(uuid.uuid4())
        self.__read_filepath = os.path.join(self.LARGE_FILE_DIRECTORY_READ_PATH,
                                            self.__filename)
        self.__read_filepath_sis = os.path.join(self.LARGE_FILE_DIRECTORY_READ_PATH,
                                                self.__filename_sis)
        self.__write_filepath = os.path.join(self.LARGE_FILE_DIRECTORY_WRITE_PATH,
                                             self.__filename)
        self.__write_filepath_sis = os.path.join(self.LARGE_FILE_DIRECTORY_WRITE_PATH,
                                                 self.__filename_sis)

        self.__filestore_resource, datum_factory_z = resource_factory(
            'ZEBRA_HDF51', root='/',
            resource_path=self.__read_filepath,
            resource_kwargs={}, path_semantics='posix')
        self.__filestore_resource_sis, datum_factory_sis = resource_factory(
            'SIS_HDF51', root='/',
            resource_path=self.__read_filepath_sis,
            resource_kwargs={},
            path_semantics='posix')

        time_datum = datum_factory_z({'column': 'time'})
        enc1_datum = datum_factory_z({'column': 'enc1'})
        sis_datum = datum_factory_sis({'column': 'i0'})
        sis_time = datum_factory_sis({'column': 'time'})

        self._document_cache.extend(('resource', d) for d in (self.__filestore_resource,
                                                              self.__filestore_resource_sis))
        self._document_cache.extend(('datum', d) for d in (time_datum, enc1_datum,
                                                           sis_datum, sis_time))
        self._document_cache.extend(self._det.collect_asset_docs())

        # TODO call 'read' on the detector instead
        # xs_datum_id = self.reg.register_datum(self._det.hdf5._filestore_res, {})
        xs_reading = self._det.read()
        # Write the file.
        export_zebra_data(self._encoder, self.__write_filepath, self._fast_axis)
        export_sis_data(self._sis, self.__write_filepath_sis)

        # Yield a (partial) Event document. The RunEngine will put this
        # into metadatastore, as it does all readings.
        self._last_bulk = {
            'time': ttime.time(), 'seq_num': 1,
            'data': {'time': time_datum['datum_id'],
                     'enc1': enc1_datum['datum_id'],
                     'fluor': xs_reading['fluor']['value'],
                     'i0': sis_datum['datum_id'],
                     'i0_time': sis_time['datum_id']},
            'timestamps': {'time': time_datum['datum_id'],  # not a typo
                           'enc1': time_datum['datum_id'],
                           'fluor': xs_reading['fluor']['timestamp'],
                           'i0': sis_time['datum_id'],
                           'i0_time': sis_time['datum_id']}
        }
        # NullStatus (from ophyd.sim) reports "done" immediately.
        return NullStatus()
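complete() only assembles the bulk Event; the RunEngine retrieves it through the flyer's collect() and drains the document cache through collect_asset_docs(). A minimal sketch of those companions, assuming the attributes set above:

    def collect(self):
        # Yield the single bulk Event assembled in complete(). The
        # RunEngine pairs it with the descriptor from describe_collect().
        if self._last_bulk is None:
            raise RuntimeError('collect() called before complete()')
        yield self._last_bulk
        self._last_bulk = None
        self._mode = 'idle'

    def collect_asset_docs(self):
        # Emit each cached (name, doc) pair exactly once.
        items = list(self._document_cache)
        self._document_cache.clear()
        yield from items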