Example #1
    def stage(self):
        """Compose the resource document for this device and cache it.

        The resource is composed with a throwaway start uid; the real
        run_start uid is injected later, so the ``run_start`` key is
        dropped before the document is cached.  Delegates the rest of
        staging to the parent class.
        """
        kwargs = {"radius": self._R, "wave_number": self._k}
        resource, datum_factory, _ = compose_resource(
            start={"uid": "a lie"},
            spec="newton",
            root="/",
            resource_path="",
            resource_kwargs=kwargs,
        )
        self._resource = resource
        self._datum_factory = datum_factory
        # Discard the placeholder start uid; a real one is added later.
        del self._resource["run_start"]
        self._asset_docs_cache.append(("resource", self._resource))
        return super().stage()
Example #2
File: writers.py — Project: xpdAcq/SHED
 def write(self, event):
     """Externalize large array data from *event* into .npy files.

     For each array-valued entry in ``event["data"]``, write the array to
     disk, yield matching ``resource`` and ``datum`` documents, and replace
     the in-event value with the datum id.  0-d (scalar) arrays are not
     written to files; they are unwrapped to plain Python numbers instead.
     Finally yields the (possibly modified) event itself.

     Yields
     ------
     (name, doc) : tuple
         ``("resource", dict)``, ``("datum", dict)`` pairs for each array,
         followed by a single ``("event", dict)``.
     """
     for k, v in event["data"].items():
         if isinstance(v, np.ndarray) and v.shape != ():
             resource_path = f'an_data/{event["uid"]}_{k}.npy'
             fpath = os.path.join(self.root, resource_path)
             os.makedirs(os.path.dirname(fpath), exist_ok=True)
             np.save(fpath, v)
             resource, compose_datum, compose_datum_page = compose_resource(
                 start=self.start,
                 spec=self.spec,
                 root=self.root,
                 resource_path=resource_path,
                 resource_kwargs=self.resource_kwargs,
             )
             yield "resource", resource
             datum = compose_datum(datum_kwargs=self.datum_kwargs)
             yield "datum", datum
             event["data"][k] = datum["datum_id"]
             event["filled"][k] = False
         # Don't write a file just for a single number!
         # BUG FIX: the original condition was `np.isscalar(v)`, which is
         # always False for an ndarray (even a 0-d one), so scalar arrays
         # were never unwrapped.  The first branch already consumed every
         # array with a non-empty shape, so reaching here with an ndarray
         # means it is 0-d.
         elif isinstance(v, np.ndarray):
             event["data"][k] = v.item()
     yield "event", event
Example #3
    def stage(self):
        """Stage the TIFF writer: compose and cache its resource document.

        Initializes the asset-docs cache (used by FileStoreTIFFIterativeWrite),
        delegates to the parent's stage, then composes a resource document for
        the TIFF file and appends it to the cache.
        """
        # Fresh cache for this staging cycle (for FileStoreTIFFIterativeWrite).
        self._asset_docs_cache = deque()
        super().stage()
        self.logger.debug("stage")
        print("stage TiffWriter")  # was a placeholder-free f-string
        resource_root_path = str(self.resource_root_path)
        resource_relative_path = str(self.relative_write_path /
                                     Path(self.file_name.get()))
        self._resource_document, self._datum_factory, _ = compose_resource(
            # compose_resource() requires a start document, but the RunEngine
            # supplies the real run_start uid later, so the "run_start" key is
            # removed immediately below.
            start={
                "uid":
                "must be a string now but will be replaced by the RunEngine with a real uid"
            },
            spec="ADC_TIFF",
            root=resource_root_path,
            resource_path=resource_relative_path,
            resource_kwargs={},
        )
        self._resource_document.pop("run_start")

        # BUG FIX: the cache was re-initialized a second time here, which
        # discarded any documents super().stage() had already appended; the
        # single initialization at the top of the method is kept instead.
        self._asset_docs_cache.append(("resource", self._resource_document))
Example #4
    def trigger(self):
        """Run a Sirepo simulation and emit resource/datum documents.

        Composes a resource document pointing at the simulation output file,
        pushes the active beamline parameters to Sirepo, runs the simulation,
        writes the returned datafile to disk, parses it, and publishes the
        results through this device's signals.  Returns a finished status.
        """
        super().trigger()

        # Output file lives under <root>/<YYYY/MM/DD>/<uid>.dat.
        date = datetime.datetime.now()
        file_name = new_uid()
        self._resource_document, self._datum_factory, _ = compose_resource(
            start={
                'uid': 'needed for compose_resource() but will be discarded'
            },
            spec=self._sim_type,
            root=self._root_dir,
            resource_path=str(
                Path(date.strftime('%Y/%m/%d')) / Path(f'{file_name}.dat')),
            resource_kwargs={})
        # now discard the start uid, a real one will be added later
        self._resource_document.pop('run_start')
        self._asset_docs_cache.append(('resource', self._resource_document))

        # Absolute path where the simulation datafile will be written.
        sim_result_file = str(
            Path(self._resource_document['root']) /
            Path(self._resource_document['resource_path']))

        if not self.source_simulation:
            if self.sirepo_component is not None:
                # Record which beamline elements support auto-computed
                # orientation vectors, keyed by element title.
                for component in self.data['models']['beamline']:
                    if 'autocomputeVectors' in component.keys():
                        self.autocompute_params[component[
                            'title']] = component['autocomputeVectors']
                # Copy each active parameter's current value into the Sirepo
                # beamline model before running the simulation.
                for i in range(len(self.active_parameters)):
                    real_field = self.fields['field' + str(i)].replace(
                        'sirepo_', '')
                    dict_key = self.fields['field' + str(i)].replace(
                        'sirepo', self.parents['par' + str(i)])
                    x = self.active_parameters[dict_key].read(
                    )[f'{self.parents["par" + str(i)]}_{self.fields["field" + str(i)]}'][
                        'value']
                    element = self.sb.find_element(
                        self.data['models']['beamline'], 'title',
                        self.parents['par' + str(i)])
                    element[real_field] = x
                    # Grazing-angle changes require the dependent orientation
                    # vectors to be recomputed on the Sirepo side.
                    if self.parents[f'par{i}'] in self.autocompute_params.keys(
                    ) and 'grazingAngle' in dict_key:
                        grazing_vecs_dict = {}
                        autocompute_key = f'{self.parents[f"par{i}"]}_sirepo_autocomputeVectors'
                        autocompute_type = self.sirepo_components[self.parents[
                            f'par{i}']].read()[autocompute_key]['value']
                        grazing_vecs_dict['angle'] = x
                        grazing_vecs_dict[
                            'autocompute_type'] = autocompute_type
                        optic_id = self.sb.find_optic_id_by_name(
                            self.parents[f'par{i}'])
                        self.sb.update_grazing_vectors(
                            self.data['models']['beamline'][optic_id],
                            grazing_vecs_dict)

            # Report on the watchpoint element identified by self.watch_name.
            watch = self.sb.find_element(self.data['models']['beamline'],
                                         'title', self.watch_name)

            self.data['report'] = 'watchpointReport{}'.format(watch['id'])
        else:
            # Source-only simulation: no beamline propagation.
            self.data['report'] = "intensityReport"

        _, duration = self.sb.run_simulation()
        self.duration.put(duration)

        datafile = self.sb.get_datafile()

        # Persist the raw simulation output referenced by the resource doc.
        with open(sim_result_file, 'wb') as f:
            f.write(datafile)

        def update_components(_data):
            # Push the parsed results onto this device's signals.
            self.shape.put(_data['shape'])
            self.mean.put(_data['mean'])
            self.photon_energy.put(_data['photon_energy'])
            self.horizontal_extent.put(_data['horizontal_extent'])
            self.vertical_extent.put(_data['vertical_extent'])

        # 1-D for reports listed in one_d_reports, otherwise a 2-D image.
        if self.data['report'] in self.one_d_reports:
            ndim = 1
        else:
            ndim = 2
        ret = read_srw_file(sim_result_file, ndim=ndim)
        # The resource document is already in the cache; mutating it here
        # still updates the cached entry (same object).
        self._resource_document["resource_kwargs"]["ndim"] = ndim
        update_components(ret)

        datum_document = self._datum_factory(datum_kwargs={})
        self._asset_docs_cache.append(("datum", datum_document))

        # The image signal carries the datum id, not the pixel data.
        self.image.put(datum_document["datum_id"])

        self.sirepo_json.put(json.dumps(self.data))

        # One resource/datum pair per trigger; reset for the next one.
        self._resource_document = None
        self._datum_factory = None

        return NullStatus()
Example #5
def test_resource_start_optional():
    """compose_resource() must accept calls that omit the ``start`` argument."""
    kwargs = {
        "spec": "TEST",
        "root": "/",
        "resource_path": "",
        "resource_kwargs": {},
    }
    event_model.compose_resource(**kwargs)
Example #6
    def trigger(self, *args, **kwargs):
        """Run the Sirepo simulation for this watchpoint and emit documents.

        Composes a resource document for the output datafile, runs the
        simulation through ``self.connection``, writes the result to disk,
        parses it per simulation type, and publishes the parsed values on
        this device's signals.  Returns a finished status.
        """
        logger.debug(f"Custom trigger for {self.name}")

        # Result file is stored under <root>/<YYYY/MM/DD>/<uid>.dat.
        date = datetime.datetime.now()
        self._assets_dir = date.strftime("%Y/%m/%d")
        self._result_file = f"{new_uid()}.dat"

        self._resource_document, self._datum_factory, _ = compose_resource(
            start={"uid": "needed for compose_resource() but will be discarded"},
            spec=self.connection.data["simulationType"],
            root=self._root_dir,
            resource_path=str(Path(self._assets_dir) / Path(self._result_file)),
            resource_kwargs={},
        )
        # now discard the start uid, a real one will be added later
        self._resource_document.pop("run_start")
        self._asset_docs_cache.append(("resource", self._resource_document))

        # Absolute path of the datafile referenced by the resource document.
        sim_result_file = str(
            Path(self._resource_document["root"])
            / Path(self._resource_document["resource_path"])
        )

        # Request the watchpoint report that belongs to this element's id.
        self.connection.data["report"] = f"watchpointReport{self.id._sirepo_dict['id']}"

        _, duration = self.connection.run_simulation()
        self.duration.put(duration)

        datafile = self.connection.get_datafile()

        # Persist the raw simulation output to the path recorded above.
        with open(sim_result_file, "wb") as f:
            f.write(datafile)

        conn_data = self.connection.data
        sim_type = conn_data["simulationType"]
        if sim_type == "srw":
            ndim = 2  # this will always be a report with 2D data.
            ret = read_srw_file(sim_result_file, ndim=ndim)
            self._resource_document["resource_kwargs"]["ndim"] = ndim
        elif sim_type == "shadow":
            nbins = conn_data['models'][conn_data['report']]['histogramBins']
            ret = read_shadow_file(sim_result_file, histogram_bins=nbins)
            self._resource_document["resource_kwargs"]["histogram_bins"] = nbins
        # NOTE(review): if sim_type is neither "srw" nor "shadow", ``ret`` is
        # never bound and update_components(ret) below raises NameError —
        # confirm whether other simulation types can reach this method.

        def update_components(_data):
            # Push the parsed results onto this device's signals.
            self.shape.put(_data["shape"])
            self.mean.put(_data["mean"])
            self.photon_energy.put(_data["photon_energy"])
            self.horizontal_extent.put(_data["horizontal_extent"])
            self.vertical_extent.put(_data["vertical_extent"])

        update_components(ret)

        datum_document = self._datum_factory(datum_kwargs={})
        self._asset_docs_cache.append(("datum", datum_document))

        # The image signal carries the datum id, not the pixel data.
        self.image.put(datum_document["datum_id"])

        # One resource/datum pair per trigger; reset for the next one.
        self._resource_document = None
        self._datum_factory = None

        logger.debug(f"\nReport for {self.name}: "
                     f"{self.connection.data['report']}\n")

        # We call the trigger on super at the end to update the sirepo_data_json
        # and the corresponding hash after the simulation is run.
        super().trigger(*args, **kwargs)
        return NullStatus()