def stage(self):
    """Prepare a new acquisition.

    Picks fresh file/path stems, resets the per-run datum counter, and
    queues a Resource document in the asset-docs cache.  Temporarily
    supports both the legacy Registry path (the Registry mints the
    resource uid) and the registry-free path (uid generated here).
    """
    self._file_stem = sim.short_uid()
    self._path_stem = os.path.join(self.save_path, self._file_stem)
    self._datum_counter = itertools.count()
    resource = dict(spec=self._spec,
                    root='',
                    resource_path=self._path_stem,
                    resource_kwargs={},
                    path_semantics=os.name)
    if self.reg is None:
        # No Registry configured: mint the uid ourselves.
        self._resource_uid = sim.new_uid()
    else:
        # Legacy Registry generates the uid.  register_resource() has
        # accidentally different parameter names than the document keys.
        self._resource_uid = self.reg.register_resource(
            rpath=resource['resource_path'],
            rkwargs=resource['resource_kwargs'],
            root=resource['root'],
            spec=resource['spec'],
            path_semantics=resource['path_semantics'])
    # uid is attached last so both branches share the same document shape.
    resource['uid'] = self._resource_uid
    self._asset_docs_cache.append(('resource', resource))
def trigger(self):
    """Run one Sirepo simulation and publish its results.

    Pushes the two configured element fields (``field0``/``field1``) from
    the active parameters into the Sirepo beamline model, selects the
    report (watchpoint vs. intensity), runs the simulation, writes the
    returned datafile to a dated path under ``/tmp/data``, and puts the
    summary statistics to this device's signal components.

    Returns
    -------
    NullStatus
        The work is synchronous; the status is already done.
    """
    super().trigger()
    datum_id = new_uid()
    date = datetime.datetime.now()
    srw_file = Path('/tmp/data') / Path(date.strftime('%Y/%m/%d')) / \
        Path('{}.dat'.format(datum_id))
    # BUGFIX: nothing in this method created the dated directory
    # (/tmp/data/YYYY/MM/DD), so open(..., 'wb') below raised
    # FileNotFoundError on the first run of a new day.
    srw_file.parent.mkdir(parents=True, exist_ok=True)
    if not self.source_simulation:
        if self.sirepo_component is not None:
            # Signal names carry a 'sirepo_' prefix; the Sirepo model fields
            # do not.
            real_field0 = self.field0.replace('sirepo_', '')
            real_field1 = self.field1.replace('sirepo_', '')
            x = self.active_parameters[real_field0].read(
            )[f'{self.sirepo_component.name}_{self.field0}']['value']
            y = self.active_parameters[real_field1].read(
            )[f'{self.sirepo_component.name}_{self.field1}']['value']
            element = self.sb.find_element(self.data['models']['beamline'],
                                           'title',
                                           self.sirepo_component.name)
            element[real_field0] = x
            element[real_field1] = y
            watch = self.sb.find_element(self.data['models']['beamline'],
                                         'title', self.watch_name)
            self.data['report'] = 'watchpointReport{}'.format(watch['id'])
    else:
        # Source simulations have no watchpoint; use the intensity report.
        self.data['report'] = "intensityReport"
    self.sb.run_simulation()
    with open(srw_file, 'wb') as f:
        f.write(self.sb.get_datafile())
    # 1-D reports (e.g. spectra) vs. 2-D intensity maps.
    if self.data['report'] in self.one_d_reports:
        ndim = 1
    else:
        ndim = 2
    ret = read_srw_file(srw_file, ndim=ndim)
    self.image.put(datum_id)
    self.shape.put(ret['shape'])
    self.mean.put(ret['mean'])
    self.photon_energy.put(ret['photon_energy'])
    self.horizontal_extent.put(ret['horizontal_extent'])
    self.vertical_extent.put(ret['vertical_extent'])
    self._resource_id = self.reg.insert_resource('srw', srw_file,
                                                 {'ndim': ndim})
    self.reg.insert_datum(self._resource_id, datum_id, {})
    return NullStatus()
def stage(self):
    """Stage the detector.

    Derives fresh file/path stems for this run, resets the per-run datum
    counter, mints a resource uid, and caches the Resource document for
    later collection.
    """
    stem = sim.short_uid()
    self._file_stem = stem
    self._path_stem = os.path.join(self.save_path, stem)
    self._datum_counter = itertools.count()
    self._resource_uid = sim.new_uid()
    resource = dict(spec=self._spec,
                    root='',
                    resource_path=self._path_stem,
                    resource_kwargs={},
                    path_semantics=os.name)
    resource['uid'] = self._resource_uid
    self._asset_docs_cache.append(('resource', resource))
def trigger(self):
    """Run one Sirepo watchpoint simulation and publish its results.

    Reads the optional ``field0``/``field1`` signal values from the
    Sirepo component, writes them into the matching beamline-element
    fields, runs the simulation, saves the datafile under ``/tmp/data``,
    and puts the summary statistics to this device's components.

    Returns
    -------
    NullStatus
        The work is synchronous; the status is already done.
    """
    super().trigger()
    # BUGFIX: this guard previously tested ``self.field1`` while reading
    # ``self.field0`` — with field0 set but field1 unset the read was
    # silently skipped, and with field1 set but field0 unset the getattr
    # would fail.  Every other field0/field1 check in this method pairs
    # the guard with the field it uses.
    if self.field0 is not None:
        x = getattr(self.sirepo_component, self.field0).read(
        )[f'{self.sirepo_component.name}_{self.field0}']['value']
    if self.field1 is not None:
        y = getattr(self.sirepo_component, self.field1).read(
        )[f'{self.sirepo_component.name}_{self.field1}']['value']
    datum_id = new_uid()
    date = datetime.datetime.now()
    srw_file = Path('/tmp/data') / Path(date.strftime('%Y/%m/%d')) / \
        Path('{}.dat'.format(datum_id))
    # BUGFIX: ensure the dated output directory exists before the
    # open(..., 'wb') below; nothing else creates it.
    srw_file.parent.mkdir(parents=True, exist_ok=True)
    element = self.sb.find_element(self.data['models']['beamline'],
                                   'title',
                                   self.sirepo_component.name)
    # Strip the 'sirepo_' signal-name prefix to get the model field name,
    # then write the raw value into the Sirepo beamline element.
    if self.field0 is not None:
        real_field0 = self.field0.replace('sirepo_', '')
        element[real_field0] = x
    if self.field1 is not None:
        real_field1 = self.field1.replace('sirepo_', '')
        element[real_field1] = y
    watch = self.sb.find_element(self.data['models']['beamline'],
                                 'title', self.watch_name)
    self.data['report'] = 'watchpointReport{}'.format(watch['id'])
    self.sb.run_simulation()
    with open(srw_file, 'wb') as f:
        f.write(self.sb.get_datafile())
    ret = read_srw_file(srw_file)
    self.image.put(datum_id)
    self.shape.put(ret['shape'])
    self.mean.put(ret['mean'])
    self.photon_energy.put(ret['photon_energy'])
    self.horizontal_extent.put(ret['horizontal_extent'])
    self.vertical_extent.put(ret['vertical_extent'])
    self._resource_id = self.reg.insert_resource('srw', srw_file, {})
    self.reg.insert_datum(self._resource_id, datum_id, {})
    return NullStatus()
def stage(self):
    """Stage: allocate a fresh NPY_SEQ resource rooted at ``save_path``
    and queue its Resource document in the asset-docs cache."""
    stem = short_uid()
    self._datum_counter = itertools.count()
    self._path_stem = os.path.join(self.save_path, stem)
    self._resource_uid = new_uid()
    # Translate os.name into the event-model path_semantics vocabulary
    # (KeyError on anything other than 'posix'/'nt' is intentional).
    semantics = {'posix': 'posix', 'nt': 'windows'}[os.name]
    resource = dict(spec='NPY_SEQ',
                    root=self.save_path,
                    resource_path=stem,
                    resource_kwargs={},
                    uid=self._resource_uid,
                    path_semantics=semantics)
    self._asset_docs_cache.append(('resource', resource))
def trigger(self):
    """Run one SRW simulation at the current motor positions.

    Reads the two motor values, runs ``srw_run`` writing its output to a
    dated file under ``/tmp/data``, and publishes the resulting summary
    statistics to this device's signal components.

    Returns
    -------
    NullStatus
        The work is synchronous; the status is already done.
    """
    super().trigger()
    x = self._motor0.read()[self._field0]['value']
    y = self._motor1.read()[self._field1]['value']
    datum_id = new_uid()
    date = datetime.datetime.now()
    srw_file = Path('/tmp/data') / Path(date.strftime('%Y/%m/%d')) / \
        Path('{}.dat'.format(datum_id))
    # BUGFIX: create the dated directory before srw_run() writes its
    # output file there — nothing in this method guaranteed it existed
    # (harmless if srw_run also creates it, thanks to exist_ok=True).
    srw_file.parent.mkdir(parents=True, exist_ok=True)
    with _print_redirect():
        srw_run(str(srw_file), slit_x_width=x, slit_y_width=y)
    ret = read_srw_file(srw_file)
    self.image.put(datum_id)
    self.shape.put(ret['shape'])
    self.mean.put(ret['mean'])
    self.photon_energy.put(ret['photon_energy'])
    self.horizontal_extent.put(ret['horizontal_extent'])
    self.vertical_extent.put(ret['vertical_extent'])
    self._resource_id = self.reg.insert_resource('srw', srw_file, {})
    self.reg.insert_datum(self._resource_id, datum_id, {})
    return NullStatus()
def trigger(self):
    """Trigger one Sirepo run and emit Resource/Datum asset documents.

    Flow: compose a Resource for the dated output file, push any changed
    optic parameters (including grazing-angle vector recomputation) into
    the Sirepo beamline model, run the simulation, write the datafile
    locally, then publish the summary statistics and the datum id to this
    device's components.  Returns a ``NullStatus`` (work is synchronous).
    """
    super().trigger()
    date = datetime.datetime.now()
    file_name = new_uid()
    # compose_resource() requires a start document, but the real run_start
    # uid is only known later — pass a placeholder and pop it below.
    self._resource_document, self._datum_factory, _ = compose_resource(
        start={
            'uid': 'needed for compose_resource() but will be discarded'
        },
        spec=self._sim_type,
        root=self._root_dir,
        resource_path=str(Path(date.strftime('%Y/%m/%d')) /
                          Path(f'{file_name}.dat')),
        resource_kwargs={})
    # now discard the start uid, a real one will be added later
    self._resource_document.pop('run_start')
    self._asset_docs_cache.append(('resource', self._resource_document))
    sim_result_file = str(Path(self._resource_document['root']) /
                          Path(self._resource_document['resource_path']))
    # NOTE(review): nothing here creates the dated directory for
    # sim_result_file before the open(..., 'wb') below — presumably it is
    # created elsewhere (stage()/startup?); confirm, or the write fails on
    # a fresh day/root.
    if not self.source_simulation:
        if self.sirepo_component is not None:
            # Collect per-optic autocompute vector settings; needed to keep
            # orientation vectors consistent after grazing-angle edits.
            for component in self.data['models']['beamline']:
                if 'autocomputeVectors' in component.keys():
                    self.autocompute_params[component[
                        'title']] = component['autocomputeVectors']
            for i in range(len(self.active_parameters)):
                # Map the ophyd signal name back to the Sirepo model field:
                # 'sirepo_<field>' -> '<field>', keyed by parent optic name.
                real_field = self.fields['field' + str(i)].replace(
                    'sirepo_', '')
                dict_key = self.fields['field' + str(i)].replace(
                    'sirepo', self.parents['par' + str(i)])
                x = self.active_parameters[dict_key].read(
                )[f'{self.parents["par" + str(i)]}_{self.fields["field" + str(i)]}'][
                    'value']
                element = self.sb.find_element(
                    self.data['models']['beamline'], 'title',
                    self.parents['par' + str(i)])
                element[real_field] = x
                # Grazing-angle changes on autocompute optics require the
                # server-side orientation vectors to be recomputed.
                if self.parents[f'par{i}'] in self.autocompute_params.keys(
                ) and 'grazingAngle' in dict_key:
                    grazing_vecs_dict = {}
                    autocompute_key = f'{self.parents[f"par{i}"]}_sirepo_autocomputeVectors'
                    autocompute_type = self.sirepo_components[self.parents[
                        f'par{i}']].read()[autocompute_key]['value']
                    grazing_vecs_dict['angle'] = x
                    grazing_vecs_dict[
                        'autocompute_type'] = autocompute_type
                    optic_id = self.sb.find_optic_id_by_name(
                        self.parents[f'par{i}'])
                    self.sb.update_grazing_vectors(
                        self.data['models']['beamline'][optic_id],
                        grazing_vecs_dict)
            watch = self.sb.find_element(self.data['models']['beamline'],
                                         'title', self.watch_name)
            self.data['report'] = 'watchpointReport{}'.format(watch['id'])
    else:
        # Source simulations have no watchpoint report.
        self.data['report'] = "intensityReport"
    _, duration = self.sb.run_simulation()
    self.duration.put(duration)
    datafile = self.sb.get_datafile()
    with open(sim_result_file, 'wb') as f:
        f.write(datafile)

    def update_components(_data):
        # Fan the per-run statistics out to the signal components.
        self.shape.put(_data['shape'])
        self.mean.put(_data['mean'])
        self.photon_energy.put(_data['photon_energy'])
        self.horizontal_extent.put(_data['horizontal_extent'])
        self.vertical_extent.put(_data['vertical_extent'])

    # 1-D reports (e.g. spectra) vs. 2-D intensity maps.
    if self.data['report'] in self.one_d_reports:
        ndim = 1
    else:
        ndim = 2
    ret = read_srw_file(sim_result_file, ndim=ndim)
    self._resource_document["resource_kwargs"]["ndim"] = ndim
    update_components(ret)
    datum_document = self._datum_factory(datum_kwargs={})
    self._asset_docs_cache.append(("datum", datum_document))
    self.image.put(datum_document["datum_id"])
    # Persist the full simulation model alongside the data.
    self.sirepo_json.put(json.dumps(self.data))
    # Reset per-trigger document state.
    self._resource_document = None
    self._datum_factory = None
    return NullStatus()
def kickoff(self):
    """Start the flyer.

    Authenticates to the Sirepo server, pre-allocates one dated output
    file and Resource document per planned copy, creates one simulation
    copy per entry in ``params_to_change`` (applying its parameter
    updates and watchpoint report), then runs all copies — in parallel
    via multiprocessing, or serially.  Returns ``NullStatus`` after the
    runs complete.
    """
    sb = SirepoBluesky(self.server_name)
    data, schema = sb.auth(self.sim_code, self.sim_id)
    self._copies = []
    self._srw_files = []
    # NOTE(review): this loop is bounded by self._copy_count while the
    # copy-creation loop below iterates self.params_to_change — presumably
    # these always agree; confirm against __init__.
    for i in range(self._copy_count):
        datum_id = new_uid()
        date = datetime.datetime.now()
        srw_file = str(
            Path(self.root_dir) / Path(date.strftime('%Y/%m/%d')) /
            Path('{}.dat'.format(datum_id)))
        self._srw_files.append(srw_file)
        _resource_uid = new_uid()
        resource = {
            'spec': 'SIREPO_FLYER',
            # from 00-startup.py (added by mrakitin for future generations :D)
            'root': self.root_dir,
            'resource_path': srw_file,
            'resource_kwargs': {},
            'path_semantics': {
                'posix': 'posix',
                'nt': 'windows'
            }[os.name],
            'uid': _resource_uid
        }
        self._resource_uids.append(_resource_uid)
        self._asset_docs_cache.append(('resource', resource))
    for param in self.params_to_change:
        # name doesn't need to be unique, server will rename it
        c1 = sb.copy_sim(
            '{} Bluesky'.format(sb.data['models']['simulation']['name']),
        )
        print('copy {}, {}'.format(
            c1.sim_id, c1.data['models']['simulation']['name']))
        for key, parameters_to_update in param.items():
            optic_id = sb.find_optic_id_by_name(key)
            c1.data['models']['beamline'][optic_id].update(
                parameters_to_update)
        watch = sb.find_element(c1.data['models']['beamline'], 'title',
                                self.watch_name)
        c1.data['report'] = 'watchpointReport{}'.format(watch['id'])
        self._copies.append(c1)
    if self.run_parallel:
        manager = Manager()
        # Manager dict so child processes can report their final state.
        self.return_status = manager.dict()
        procs = []
        for i in range(self.copy_count):
            p = Process(target=self._run,
                        args=(self._copies[i], self.return_status))
            p.start()
            procs.append(p)
        # wait for procs to finish
        for p in procs:
            p.join()
    else:
        # run serial
        # NOTE(review): assumes self.return_status already exists (only
        # the parallel branch creates it here) — confirm against __init__.
        for i in range(self.copy_count):
            print(f'running sim: {self._copies[i].sim_id}')
            status = self._copies[i].run_simulation()
            print('Status:', status['state'])
            self.return_status[self._copies[i].sim_id] = status['state']
    return NullStatus()
def kickoff(self):
    """Start the flyer.

    Authenticates to the Sirepo server, precomputes grazing-vector
    updates for optics with ``autocomputeVectors``, pre-allocates one
    dated output file and Resource document per planned copy, creates one
    simulation copy per entry in ``params_to_change``, then launches the
    runs.  In parallel mode the worker processes are started but *not*
    joined here; in serial mode the runs complete before returning.
    """
    sb = SirepoBluesky(self.server_name)
    data, schema = sb.auth(self.sim_code, self.sim_id)
    self._copies = []
    self._srw_files = []
    autocompute_data = {}
    # grazing angle; check params_to_change
    for component in data['models']['beamline']:
        if 'autocomputeVectors' in component.keys():
            autocompute_data[
                component['title']] = component['autocomputeVectors']
    # For each parameter set, record which elements need their orientation
    # vectors recomputed because their grazingAngle is being changed.
    update_grazing_vecs_list = []
    for i in self.params_to_change:
        grazing_vecs_dict = {}
        for elem, param in i.items():
            for param_name, val in param.items():
                if elem in autocompute_data.keys(
                ) and param_name == 'grazingAngle':
                    grazing_vecs_dict[elem] = {
                        'angle': val,
                        'autocompute_type': autocompute_data[elem]
                    }
        update_grazing_vecs_list.append(grazing_vecs_dict)
    # NOTE(review): this loop is bounded by self._copy_count while the
    # copy-creation loop below uses len(self.params_to_change) —
    # presumably these always agree; confirm against __init__.
    for i in range(self._copy_count):
        datum_id = new_uid()
        date = datetime.datetime.now()
        srw_file = str(
            Path(self.root_dir) / Path(date.strftime('%Y/%m/%d')) /
            Path('{}.dat'.format(datum_id)))
        self._srw_files.append(srw_file)
        _resource_uid = new_uid()
        resource = {
            'spec': 'SIREPO_FLYER',
            # from 00-startup.py (added by mrakitin for future generations :D)
            'root': self.root_dir,
            'resource_path': srw_file,
            'resource_kwargs': {},
            'path_semantics': {
                'posix': 'posix',
                'nt': 'windows'
            }[os.name],
            'uid': _resource_uid
        }
        self._resource_uids.append(_resource_uid)
        self._asset_docs_cache.append(('resource', resource))
    for i in range(len(self.params_to_change)):
        # name doesn't need to be unique, server will rename it
        c1 = sb.copy_sim(
            '{} Bluesky'.format(sb.data['models']['simulation']['name']),
        )
        print('copy {} -> {}, {}'.format(
            sb.data['models']['simulation']['simulationId'], c1.sim_id,
            c1.data['models']['simulation']['name']))
        for key, parameters_to_update in self.params_to_change[i].items():
            optic_id = sb.find_optic_id_by_name(key)
            c1.data['models']['beamline'][optic_id].update(
                parameters_to_update)
            # update vectors if needed
            if key in update_grazing_vecs_list[i]:
                sb.update_grazing_vectors(
                    c1.data['models']['beamline'][optic_id],
                    update_grazing_vecs_list[i][key])
        watch = sb.find_element(c1.data['models']['beamline'], 'title',
                                self.watch_name)
        c1.data['report'] = 'watchpointReport{}'.format(watch['id'])
        self._copies.append(c1)
    if self.run_parallel:
        manager = Manager()
        # Manager dicts so child processes can report state and duration.
        self.return_status = manager.dict()
        self.return_duration = manager.dict()
        self.procs = []
        for i in range(self.copy_count):
            p = Process(target=self._run,
                        args=(self._copies[i], self.return_status,
                              self.return_duration))
            p.start()
            self.procs.append(p)
        # wait for procs to finish
        # NOTE(review): joins are intentionally disabled here — presumably
        # complete()/collect() waits on self.procs instead; confirm.
        # for p in self.procs:
        #     p.join()
    else:
        # run serial
        # NOTE(review): assumes self.return_status/self.return_duration
        # already exist (only the parallel branch creates them here) —
        # confirm against __init__.
        for i in range(self.copy_count):
            print(f'running sim: {self._copies[i].sim_id}')
            status, duration = self._copies[i].run_simulation()
            print(
                f"Status of sim {self._copies[i].sim_id}: {status['state']} in {duration:.01f} seconds"
            )
            self.return_status[self._copies[i].sim_id] = status['state']
            self.return_duration[self._copies[i].sim_id] = duration
    return NullStatus()