def __convert_sequences_to_beamlines(self, code_var):
    """Convert sequence models into beamlines with explicit drift elements.

    Each sequence item names an element and its "at" position; the gaps
    between consecutive elements are filled with generated drifts from
    self._compute_drifts(). The sequence "refer" attribute determines
    whether "at" marks the element's entry, centre, or exit — previously
    only entry alignment was handled (see the old TODO); "at" is now
    normalized to the entry position for all three alignments.
    Consumes data.models.sequences and appends to data.models.beamlines.
    """
    data = PKDict(
        models=self.data.models,
    )
    drifts = self._compute_drifts(code_var)
    util = lattice.LatticeUtil(data, self.schema)
    for seq in data.models.sequences:
        beamline = PKDict(
            name=seq.name,
            items=[],
        )
        # "refer" defaults to centre when absent (MAD-X sequence default)
        alignment = seq.refer.lower() if 'refer' in seq else 'centre'
        assert alignment in ('entry', 'centre', 'exit'), \
            'unsupported sequence refer: {}: {}'.format(seq.name, alignment)
        prev = None
        for item in seq['items']:
            el = util.id_map[item[0]]
            at = self._eval_var(code_var, item[1])
            length = self._eval_var(code_var, el.get('l', 0))
            # normalize the "at" value to the element's entry position
            entry = at
            if alignment == 'centre':
                entry = at - length / 2
            elif alignment == 'exit':
                entry = at - length
            if prev is not None:
                # fill the gap since the previous element's exit
                d = self._get_drift(drifts, entry - prev)
                if d:
                    beamline['items'].append(d)
            beamline['items'].append(el._id)
            prev = entry + length
        if len(beamline['items']):
            if 'l' in seq:
                # pad to the declared sequence length with a final drift
                d = self._get_drift(drifts, self._eval_var(code_var, seq.l) - prev)
                if d:
                    beamline['items'].append(d)
            beamline.id = self.parser.next_id()
            data.models.beamlines.append(beamline)
    del data.models['sequences']
    util.sort_elements_and_beamlines()
def __convert_references_to_ids(self):
    """Replace name references in element/command fields with numeric ids.

    Builds an upper-cased name -> id map from the lattice id_map, then
    walks every field of every element and command, converting:
    - LatticeBeamlineList fields from a beamline name to its id
    - non-empty OutputFile fields to the flag value '1'
    - enum-typed fields to lower case, with Boolean enums further
      normalized to '1'/'0'
    """
    util = lattice.LatticeUtil(self.data, self.schema)
    name_to_id = PKDict()
    for id in util.id_map:
        name = util.id_map[id].name
        if not name:
            # unnamed entries (e.g. commands) cannot be referenced by name
            continue
        # references are matched case-insensitively via upper case
        name = name.upper()
        #assert name not in name_to_id, 'duplicate name: {}'.format(name)
        name_to_id[name] = id
    for container in ('elements', 'commands'):
        for el in self.data.models[container]:
            model_schema = self.schema.model[
                lattice.LatticeUtil.model_name_for_data(el)]
            for f in model_schema:
                # el_schema is [label, type, ...]; index 1 is the field type
                el_schema = model_schema[f]
                if f in el:
                    if el[f] and 'LatticeBeamlineList' in el_schema[1]:
                        # beamline reference: name -> id
                        el[f] = name_to_id[el[f].upper()]
                    elif el[f] and el_schema[
                            1] == 'OutputFile' and el[f] != '0':
                        # any truthy output file value becomes the '1' flag
                        el[f] = '1'
                    elif el_schema[1] in self.schema.enum:
                        #TODO(pjm): ensure value is present in enum list
                        el[f] = el[f].lower()
                        if 'Boolean' in el_schema[1]:
                            # normalize boolean enums to '1'/'0'
                            if el[f] == '1' or el[f] == '0':
                                pass
                            elif el[f].lower() == 'true':
                                el[f] = '1'
                            else:
                                el[f] = '0'
def compute_cache(self, data, schema):
    """Build the rpn cache for *data*, store it on data.models, return it.

    Returns None when *data* has no models to iterate.
    """
    if 'models' not in data:
        # nothing to compute against; caller gets no cache
        return None
    iterator = CodeVarIterator(self, data, schema)
    result = lattice.LatticeUtil(data, schema).iterate_models(iterator).result
    # fold every declared variable into the cache via the iterator
    for var_name, var_value in self.variables.items():
        iterator.add_to_cache(var_name, var_value)
    data.models.rpnCache = result
    return result
def test_importer(import_req):
    """Round-trip elegant fixture files and compare generated output.

    For each .ele/.lte file in the data dir, imports it via
    elegant.import_file() and renders either the lattice (.lte) or the
    commands (.ele, using its companion .ele.lte lattice). The rendered
    text — or the import error message, if import failed — is compared
    against the stored .txt expectation file.
    """
    # removed unused imports: PKDict, sirepo.util, flask were never referenced
    from pykern.pkunit import pkeq
    from sirepo.template import lattice
    from sirepo.template import elegant
    with pkunit.save_chdir_work():
        for fn in pkio.sorted_glob(pkunit.data_dir().join('*')):
            # only top-level inputs; *.ele.lte files are companions of .ele
            if not pkio.has_file_extension(fn, ('ele', 'lte')) \
                or fn.basename.endswith('ele.lte'):
                continue
            error = None
            try:
                data = elegant.import_file(import_req(fn))
            except Exception as e:
                pkdlog(pkdexc())
                error = str(e)
            if error:
                # the error message itself is the expected output
                actual = error
            else:
                if pkio.has_file_extension(fn, 'lte'):
                    data['models']['commands'] = []
                    actual = '{}{}'.format(
                        elegant._generate_variables(data),
                        elegant._generate_lattice(
                            elegant._build_filename_map(data),
                            lattice.LatticeUtil(data, elegant._SCHEMA),
                        ),
                    )
                else:
                    #TODO(robnagler) test simulationId
                    data2 = elegant.import_file(
                        import_req(fn.new(ext='ele.lte')), test_data=data)
                    actual = elegant._generate_commands(
                        elegant._build_filename_map(data2),
                        lattice.LatticeUtil(data2, elegant._SCHEMA),
                    )
            outfile = fn.basename + '.txt'
            pkio.write_text(outfile, actual)
            expect = pkio.read_text(pkunit.data_dir().join(outfile))
            pkeq(expect, actual)
def compute_cache(self, data, schema):
    """Build and return the rpn value cache for *data*.

    Starts from the cache produced by iterating the lattice models, then
    adds every variable that evaluates without error. Each variable is
    cached under its name; expression values are additionally cached
    under the expression text itself.
    """
    cache = lattice.LatticeUtil(data, schema).iterate_models(
        CodeVarIterator(self)).result
    for name, value in self.variables.items():
        v, err = self.eval_var(value)
        if not err:
            # variables that fail evaluation are silently skipped
            if self.is_var_value(value):
                # expression: also cache under the expression text,
                # lower-cased when the code is case-insensitive
                if self.case_insensitive:
                    value = value.lower()
                cache[value] = v
            else:
                # literal: normalize to float before caching
                v = float(v)
            cache[name] = v
    return cache
def _generate_beamline_elements(report, data):
    """Generate the zgoubi source for the selected beamline's elements.

    Builds id -> model maps for beamlines and (unit-scaled) elements,
    then delegates rendering to _generate_beamline() for the beamline
    selected by the lattice util.

    Removed dead code: the local ``res = ''`` was never used.
    """
    beamline_map = PKDict()
    for bl in data.models.beamlines:
        beamline_map[bl.id] = bl
    element_map = PKDict()
    # deep copy: scale_to_native() mutates the element models in place
    for el in copy.deepcopy(data.models.elements):
        element_map[el._id] = zgoubi_importer.MODEL_UNITS.scale_to_native(el.type, el)
        #TODO(pjm): special case for FFA dipole array
        if 'dipoles' in el:
            for dipole in el.dipoles:
                zgoubi_importer.MODEL_UNITS.scale_to_native(dipole.type, dipole)
    beamline_id = lattice.LatticeUtil(data, _SCHEMA).select_beamline().id
    return _generate_beamline(data, beamline_map, element_map, beamline_id)
def validate_var_delete(self, name, data, schema):
    """Return an error message if variable *name* is still referenced.

    Checks the other postfix variable expressions first, then the
    lattice models. Returns None when the variable is safe to delete.

    Fix: break after the first hit per variable — previously an
    expression mentioning *name* several times listed that variable
    repeatedly in the error message.
    """
    in_use = []
    for k, value in self.postfix_variables.items():
        if k == name:
            # the variable's own definition doesn't block deletion
            continue
        for v in str(value).split(' '):
            if v == name:
                in_use.append(k)
                break
    if in_use:
        return '"{}" is in use in variable(s): {}'.format(
            name, ', '.join(in_use))
    # no variable references; check element/command fields
    in_use = lattice.LatticeUtil(data, schema).iterate_models(
        CodeVarDeleteIterator(self, name)).result
    if in_use:
        return '"{}" is in use in element(s): {}'.format(
            name, ', '.join(in_use))
    return None
def _get_file_list_from_model(model, code_name):
    """Collect every file the model references: source, lattice, support files."""
    result = [model['_SimData__source'].basename]
    # the lattice path is optional; adapters without _lattice_path are skipped
    try:
        result.append(model._SimData__adapter._lattice_path('', model))
    except AttributeError:
        pass
    # gather all supporting input files via the lattice iterator
    sim_data, _, schema = sirepo.sim_data.template_globals(code_name)
    result.extend(
        lattice.LatticeUtil(model, schema).iterate_models(
            lattice.InputFileIterator(sim_data, update_filenames=False),
        ).result
    )
    return result
def parse_file(self, lattice_text):
    """Parse OPAL lattice text and post-process the resulting models.

    Runs the base parser, then a fixed-order sequence of fixups:
    variable/pow normalization, default cleanup, track/run merging,
    filename updates, drift insertion, name-to-id conversion, option
    merging, and element deduplication.

    Returns:
        (base parser result, input file list from __update_filenames)
    """
    # function-local import — presumably avoids a circular import; confirm
    from sirepo.template import opal
    res = super().parse_file(lattice_text)
    self.__fix_pow_variables()
    self._add_variables_for_lattice_references()
    cv = opal.code_var(self.data.models.rpnVariables)
    self._code_variables_to_float(cv)
    self.__remove_bend_default_fmap()
    self.__remove_default_commands()
    self.__combine_track_and_run()
    # util is kept on self for later steps that need the lattice maps
    self.util = lattice.LatticeUtil(self.data, self.schema)
    input_files = self.__update_filenames()
    self.__add_drifts_to_beamlines(cv)
    self._set_default_beamline('select', 'line')
    self.__legacy_fixups()
    self.__convert_references_to_ids()
    self.__combine_options()
    self.__dedup_elements()
    return res, input_files
def import_file(text, data=None, update_filenames=True):
    """Parse elegant lattice *text* into a simulation data structure.

    Args:
        text: lattice file contents
        data: existing simulation data to populate (a fresh default is
            created when falsy)
        update_filenames: passed through to field validation

    Returns:
        the populated simulation data dict

    Raises:
        IOError: when the file defines no elements or no beamlines
    """
    if not data:
        data = simulation_db.default_data(elegant_common.SIM_TYPE)
    models = elegant_lattice_parser.parse_file(
        text, data.models.rpnVariables,
        lattice.LatticeUtil.max_id(data),
    )
    name_to_id, default_beamline_id = _create_name_map(models)
    # an explicitly declared default beamline overrides the computed one
    if 'default_beamline_name' in models and models[
            'default_beamline_name'] in name_to_id:
        default_beamline_id = name_to_id[models['default_beamline_name']]
    element_names = PKDict()
    rpn_cache = PKDict()
    code_var = elegant_code_var(models.rpnVariables)
    for el in models['elements']:
        # validate type first so element_names only holds valid elements
        el['type'] = _validate_type(el, element_names)
        element_names[el['name'].upper()] = el
        validate_fields(el, rpn_cache, code_var, update_filenames)
    for bl in models['beamlines']:
        bl['items'] = _validate_beamline(bl, name_to_id, element_names)
    if len(models['elements']) == 0 or len(models['beamlines']) == 0:
        raise IOError('no beamline elements found in file')
    data['models']['elements'] = models['elements']
    data['models']['beamlines'] = models['beamlines']
    data['models']['rpnVariables'] = models['rpnVariables']
    lattice.LatticeUtil(data, _SCHEMA).sort_elements_and_beamlines()
    if default_beamline_id:
        data['models']['simulation']['activeBeamlineId'] = default_beamline_id
        data['models']['simulation'][
            'visualizationBeamlineId'] = default_beamline_id
    # used by synergia app to get values for rpn expressions
    data['models']['rpnCache'] = rpn_cache
    return data
def __convert_sequences_to_beamlines(self, code_var):
    """Convert sequence models into beamlines with explicit drift elements.

    Each sequence item names an element and its "at" position; the gaps
    between consecutive elements are filled with generated drifts. The
    sequence "refer" attribute selects whether "at" marks the element's
    entry, centre, or exit; positions are normalized to entry before
    computing drift lengths. Consumes data.models.sequences and appends
    the resulting beamlines to data.models.beamlines.
    """
    data = PKDict(
        models=self.data.models,
    )
    drifts = self._compute_drifts(code_var)
    util = lattice.LatticeUtil(data, self.schema)
    for seq in data.models.sequences:
        beamline = PKDict(
            name=seq.name,
            items=[],
        )
        # "refer" defaults to centre when absent
        alignment = seq.refer.lower() if 'refer' in seq else 'centre'
        assert alignment in ('entry', 'centre', 'exit'), \
            'invalid sequence alignment: {}'.format(alignment)
        prev = None
        for item in seq['items']:
            el = util.id_map[item[0]]
            at = self._eval_var(code_var, item[1])
            length = self._eval_var(code_var, el.get('l', 0))
            # normalize the "at" value to the element's entry position
            entry = at
            if alignment == 'centre':
                entry = at - length / 2
            elif alignment == 'exit':
                entry = at - length
            if prev is not None:
                # fill the gap since the previous element's exit
                d = self._get_drift(drifts, entry - prev)
                if d:
                    beamline['items'].append(d)
            beamline['items'].append(el._id)
            prev = entry + length
        if beamline['items']:
            if 'l' in seq:
                # pad to the declared sequence length with a final drift
                d = self._get_drift(drifts, self._eval_var(code_var, seq.l) - prev)
                if d:
                    beamline['items'].append(d)
            beamline.id = self.parser.next_id()
            data.models.beamlines.append(beamline)
    del data.models['sequences']
    util.sort_elements_and_beamlines()
def validate_var_delete(self, name, data, schema):
    """Return an error message if variable *name* is still referenced.

    The name is canonicalized before matching. Other variables'
    expressions are checked first, then the lattice models; returns
    None when the variable is safe to delete.
    """
    target = self.canonicalize(name)
    # variables whose postfix expression tokens mention the target
    referencing = [
        var
        for var, expr in self.postfix_variables.items()
        if var != target and target in str(expr).split(' ')
    ]
    if referencing:
        return '"{}" is in use in variable(s): {}'.format(
            name, ', '.join(referencing),
        )
    referencing = lattice.LatticeUtil(data, schema).iterate_models(
        CodeVarDeleteIterator(self, target),
    ).result
    if referencing:
        return '"{}" is in use in element(s): {}'.format(
            name, ', '.join(referencing),
        )
    return None