def to_madx(self, data):
    """Convert elegant bunch settings into MAD-X beam/bunch models.

    Starts from the superclass conversion; when no bunched_beam command
    exists, returns the MAD-X data unchanged.
    """
    madx = super().to_madx(data)
    eb = LatticeUtil.find_first_command(data, 'bunched_beam')
    if not eb:
        return madx
    self.__normalize_elegant_beam(data, eb)
    mb = LatticeUtil.find_first_command(madx, 'beam')
    particle = LatticeUtil.find_first_command(data, 'change_particle')
    if particle:
        mb.particle = self._PARTICLE_MAP.get(particle.name, 'other')
        #TODO(pjm): other particle should set mass and charge
    else:
        # elegant's default particle is the electron
        mb.particle = 'electron'
    # clear energy so pc becomes the energy definition below
    mb.energy = 0
    madx.models.bunch.beamDefinition = 'pc'
    madx.models.bunch.longitudinalMethod = '2'
    # p_central_mev is MeV; MAD-X pc is GeV
    mb.pc = eb.p_central_mev * 1e-3
    mb.sigt = eb.sigma_s
    mb.sige = eb.sigma_dp
    for f in self._BEAM_VARS:
        self._replace_var(madx, f, eb[f])
    for dim in ('x', 'y'):
        mb[f'e{dim}'] = eb[f'emit_{dim}']
        # twiss gamma = (1 + alpha^2) / beta, stored as an rpn expression
        self._replace_var(
            madx, f'gamma_{dim}',
            '(1 + pow({}, 2)) / {}'.format(
                self._var_name(f'alpha_{dim}'),
                self._var_name(f'beta_{dim}'),
            ),
        )
    return madx
def _delete_unused_madx_commands(data):
    # remove all commands except first beam and twiss
    by_name = PKDict(
        beam=None,
        twiss=None,
    )
    # keep only the first occurrence of each command type
    for c in data.models.commands:
        if c._type in by_name and not by_name[c._type]:
            by_name[c._type] = c
    if by_name.twiss:
        # force twiss output to a file and disable sector output
        by_name.twiss.sectorfile = '0'
        by_name.twiss.sectormap = '0'
        by_name.twiss.file = '1'
    data.models.bunch.beamDefinition = 'gamma'
    # NOTE(review): assumes a beam command is always present; by_name.beam
    # would be None here otherwise — confirm callers guarantee this
    _SIM_DATA.update_beam_gamma(by_name.beam)
    data.models.commands = [
        by_name.beam,
        PKDict(
            _id=LatticeUtil.max_id(data) + 1,
            _type='select',
            flag='twiss',
            column='name,keyword,s,x,y',
        ),
        # synthesize a twiss command when the source had none
        by_name.twiss or PKDict(
            _id=LatticeUtil.max_id(data) + 2,
            _type='twiss',
            file='1',
        )
    ]
def from_madx(self, madx):
    """Build elegant simulation data from parsed MAD-X models.

    Copies beam variables, emittances and bunch sizes from the MAD-X beam
    command and inserts a change_particle command for non-electron beams.
    """
    data = super().from_madx(madx)
    eb = LatticeUtil.find_first_command(data, 'bunched_beam')
    mb = LatticeUtil.find_first_command(madx, 'beam')
    for f in self._BEAM_VARS:
        v = self._find_var(madx, f)
        if v:
            eb[f] = v.value
    ers = LatticeUtil.find_first_command(data, 'run_setup')
    # particle_energy.pc is GeV; p_central_mev is MeV
    ers.p_central_mev = self.particle_energy.pc * 1e3
    eb.emit_x = mb.ex
    eb.emit_y = mb.ey
    eb.sigma_s = mb.sigt
    eb.sigma_dp = mb.sige
    if mb.particle != 'electron':
        data.models.commands.insert(
            0,
            PKDict(
                # use max id + 1 for the new command; elegant_max_id() alone
                # would collide with an existing model id
                _id=_SIM_DATA.elegant_max_id(data) + 1,
                _type='change_particle',
                name=self._PARTICLE_MAP.get(mb.particle, 'custom'),
                #TODO(pjm): custom particle should set mass_ratio and charge_ratio
            ))
    return data
def _format_field_value(state, model, field, el_type):
    """Format one model field for output; returns [name, value] or None.

    The value is converted/quoted based on the schema element type; element
    '_type' fields are emitted under the literal name 'type'; empty values
    yield None (field omitted).
    """
    value = model[field]
    if el_type == 'Boolean':
        value = 'true' if value == '1' else 'false'
    elif el_type == 'RPNValue':
        value = _fix_opal_float(value)
    elif el_type == 'InputFile':
        # input files live in the lib with a model/field prefix
        value = '"{}"'.format(
            _SIM_DATA.lib_file_name_with_model_field(
                LatticeUtil.model_name_for_data(model), field, value))
    elif el_type == 'OutputFile':
        # 'list' commands write ascii .dat; everything else writes hdf5
        ext = 'dat' if model.get('_type', '') == 'list' else 'h5'
        value = '"{}.{}.{}"'.format(model.name, field, ext)
    elif re.search(r'List$', el_type):
        # list types store an id; emit the referenced element's name
        value = state.id_map[int(value)].name
    elif re.search(r'String', el_type):
        if len(str(value)):
            # leave {expression} strings unquoted
            if not re.search(r'^\s*\{.*\}$', value):
                value = '"{}"'.format(value)
    elif LatticeUtil.is_command(model):
        # remaining non-numeric command values get quoted
        if el_type != 'RPNValue' and len(str(value)):
            value = '"{}"'.format(value)
    elif not LatticeUtil.is_command(model):
        if model.type in _ELEMENTS_WITH_TYPE_FIELD and '_type' in field:
            return ['type', value]
    if len(str(value)):
        return [field, value]
    return None
def _format_field_value(state, model, field, el_type):
    """Format an elegant model field for serialization; returns [name, value].

    Handles file-name mapping, list/id dereferencing and SCRIPT command
    rewriting; non-numeric values are quoted.
    """
    value = model[field]
    if el_type.endswith('StringArray'):
        # arrays are written with an explicit [0] index
        return ['{}[0]'.format(field), value]
    if el_type == 'RPNValue':
        value = _format_rpn_value(value, is_command=LatticeUtil.is_command(model))
    elif el_type == 'OutputFile':
        value = state.filename_map[_file_id(model._id, state.field_index)]
    elif el_type.startswith('InputFile'):
        value = _SIM_DATA.lib_file_name_with_model_field(
            LatticeUtil.model_name_for_data(model), field, value)
        if el_type == 'InputFileXY':
            # XY input files carry the column selection in the value
            value += '={}+{}'.format(model[field + 'X'], model[field + 'Y'])
    elif el_type == 'BeamInputFile':
        value = 'bunchFile-sourceFile.{}'.format(value)
    elif el_type == 'LatticeBeamlineList':
        value = state.id_map[int(value)].name
    elif el_type == 'ElegantLatticeList':
        # 'Lattice' refers to the current lattice file
        if value and value == 'Lattice':
            value = 'elegant.lte'
        else:
            value = value + '.filename.lte'
    elif field == 'command' and LatticeUtil.model_name_for_data(
            model) == 'SCRIPT':
        # rewrite lib file references embedded in the SCRIPT command line
        for f in ('commandFile', 'commandInputFile'):
            if f in model and model[f]:
                fn = _SIM_DATA.lib_file_name_with_model_field(
                    model.type, f, model[f])
                value = re.sub(r'\b' + re.escape(model[f]) + r'\b', fn, value)
        if model.commandFile:
            # local command scripts must be invoked with an explicit path
            value = './' + value
    if not _is_numeric(el_type, value):
        value = '"{}"'.format(value)
    return [field, value]
def __fixup_distribution(self, madx, data):
    """Derive OPAL distribution settings from MAD-X beam/twiss values.

    Translates twiss parameters (beta/alpha/emittance per dimension) into
    gaussian sigma/correlation rpn expressions on the distribution command.
    """
    mb = LatticeUtil.find_first_command(madx, 'beam')
    dist = LatticeUtil.find_first_command(data, 'distribution')
    self._replace_var(data, 'brho', self.particle_energy.brho)
    self._replace_var(data, 'gamma', self.particle_energy.gamma)
    # relativistic beta derived from gamma as an rpn expression
    self._replace_var(
        data, 'beta',
        'sqrt(1 - (1 / pow({}, 2)))'.format(self._var_name('gamma')))
    for dim in ('x', 'y'):
        self._replace_var(data, f'emit_{dim}', mb[f'e{dim}'])
        beta = self._find_var(madx, f'beta_{dim}')
        if beta:
            # sigma = sqrt(emit * beta)
            dist[f'sigma{dim}'] = 'sqrt({} * {})'.format(
                self._var_name(f'emit_{dim}'), self._var_name(f'beta_{dim}'))
            # sigmap = sqrt(emit * gamma_twiss) * beta * gamma
            dist[f'sigmap{dim}'] = 'sqrt({} * {}) * {} * {}'.format(
                self._var_name(f'emit_{dim}'),
                self._var_name(f'gamma_{dim}'), self._var_name('beta'),
                self._var_name('gamma'))
            # corr = -alpha / sqrt(1 + alpha^2)
            dist[f'corr{dim}'] = '-{}/sqrt(1 + pow({}, 2))'.format(
                self._var_name(f'alpha_{dim}'),
                self._var_name(f'alpha_{dim}'))
    if self._find_var(madx, 'dp_s_coupling'):
        dist.corrz = self._var_name('dp_s_coupling')
    ob = LatticeUtil.find_first_command(data, 'beam')
    ob.bcurrent = mb.bcurrent
    if self._find_var(madx, 'n_particles_per_bunch'):
        ob.npart = self._var_name('n_particles_per_bunch')
    dist.sigmaz = self.__val(mb.sigt)
    dist.sigmapz = '{} * {} * {}'.format(mb.sige, self._var_name('beta'),
                                         self._var_name('gamma'))
def _format_field_value(state, model, field, el_type):
    """Format one model field for output; returns [name, value] or None.

    Values are converted/quoted per schema element type; element '_type'
    fields are emitted under the literal name 'type'; empty values yield
    None (field omitted).
    """
    value = model[field]
    if el_type == 'Boolean':
        value = 'true' if value == '1' else 'false'
    elif el_type == 'InputFile':
        # input files live in the lib with a model/field prefix
        value = '"{}"'.format(
            _SIM_DATA.lib_file_name_with_model_field(
                LatticeUtil.model_name_for_data(model), field, value))
    elif el_type == 'OutputFile':
        # 'list' commands write ascii .dat; everything else writes hdf5
        ext = 'dat' if model.get('_type', '') == 'list' else 'h5'
        value = '"{}.{}.{}"'.format(model.name, field, ext)
    elif re.search(r'List$', el_type):
        # list types store an id; emit the referenced element's name
        value = state.id_map[int(value)].name
    #TODO(pjm): determine the general case where command values need quotes
    # (a previous command-specific quoting branch was removed here)
    elif re.search(r'String', el_type):
        if len(str(value)):
            # leave {expression} strings unquoted
            if not re.search(r'^\s*\{.*\}$', value):
                value = '"{}"'.format(value)
    elif LatticeUtil.is_command(model):
        if el_type != 'RPNValue' and len(str(value)):
            value = '"{}"'.format(value)
    elif not LatticeUtil.is_command(model):
        if model.type in _ELEMENTS_WITH_TYPE_FIELD and '_type' in field:
            return ['type', value]
    if len(str(value)):
        return [field, value]
    return None
def _fixup_madx(madx, data):
    """Normalize imported MAD-X data into the simulation models.

    Requires a BEAM command; computes the particle energy and converts
    bend element parameters (designenergy, k0, gap) from MAD-X conventions.
    """
    import sirepo.template.madx
    cv = sirepo.template.madx.madx_code_var(madx.models.rpnVariables)
    assert _has_command(madx, 'beam'), \
        'MAD-X file missing BEAM command'
    beam = LatticeUtil.find_first_command(madx, 'beam')
    if beam.energy == 1 and (beam.pc != 0 or beam.gamma != 0
                             or beam.beta != 0 or beam.brho != 0):
        # unset the default mad-x value if other energy fields are set
        beam.energy = 0
    particle = beam.particle.lower()
    LatticeUtil.find_first_command(data, 'beam').particle = particle.upper()
    energy = ParticleEnergy.compute_energy('madx', particle, beam.copy())
    LatticeUtil.find_first_command(data, 'beam').pc = energy.pc
    LatticeUtil.find_first_command(
        data, 'track').line = data.models.simulation.visualizationBeamlineId
    for el in data.models.elements:
        if el.type == 'SBEND' or el.type == 'RBEND':
            # mad-x is GeV (total energy), designenergy is MeV (kinetic energy)
            el.designenergy = round(
                (energy.energy - ParticleEnergy.PARTICLE[particle].mass) * 1e3,
                6,
            )
            # this is different than the opal default of
            # "2 * sin(angle / 2) / length" but matches elegant and synergia
            el.k0 = cv.eval_var_with_assert(
                el.angle) / cv.eval_var_with_assert(el.l)
            el.gap = 2 * cv.eval_var_with_assert(el.hgap)
def _twiss_simulation(self):
    """Rewrite the command list for a serial twiss-only run, then generate."""
    data = self.data
    next_id = LatticeUtil.max_id(data)
    sim = data.models.simulation
    sim.simulationMode = 'serial'
    run_setup = LatticeUtil.find_first_command(data, 'run_setup')
    if not run_setup:
        run_setup = PKDict(
            _id=next_id + 1,
            _type='run_setup',
            lattice='Lattice',
            p_central_mev=data.models.bunch.p_central_mev,
        )
    run_setup.use_beamline = sim.activeBeamlineId
    run_setup.always_change_p0 = '0'
    twiss_output = LatticeUtil.find_first_command(data, 'twiss_output')
    if not twiss_output:
        twiss_output = PKDict(
            _id=next_id + 2,
            _type='twiss_output',
            filename='1',
        )
    twiss_output.final_values_only = '0'
    twiss_output.output_at_each_step = '0'
    commands = [run_setup, twiss_output]
    # keep any change_particle command ahead of run_setup
    change_particle = LatticeUtil.find_first_command(data, 'change_particle')
    if change_particle:
        commands.insert(0, change_particle)
    data.models.commands = commands
    return self._full_simulation()
def _generate_twiss_simulation(data, v):
    """Rewrite the command list for a serial twiss-only run, then generate."""
    next_id = _SIM_DATA.elegant_max_id(data)
    sim = data.models.simulation
    sim.simulationMode = 'serial'
    run_setup = LatticeUtil.find_first_command(data, 'run_setup')
    if not run_setup:
        run_setup = PKDict(
            _id=next_id + 1,
            _type='run_setup',
            lattice='Lattice',
            p_central_mev=data.models.bunch.p_central_mev,
        )
    run_setup.use_beamline = sim.activeBeamlineId
    twiss_output = LatticeUtil.find_first_command(data, 'twiss_output')
    if not twiss_output:
        twiss_output = PKDict(
            _id=next_id + 2,
            _type='twiss_output',
            filename='1',
        )
    twiss_output.final_values_only = '0'
    twiss_output.output_at_each_step = '0'
    commands = [run_setup, twiss_output]
    # keep any change_particle command ahead of run_setup
    change_particle = LatticeUtil.find_first_command(data, 'change_particle')
    if change_particle:
        commands.insert(0, change_particle)
    data.models.commands = commands
    return _generate_full_simulation(data, v)
def _generate_parameters_file(data):
    """Render the MAD-X input file(s) for the requested report."""
    res, v = template_common.generate_parameters_file(data)
    # strip the trailing report index, ex. bunchReport2 -> bunchReport
    v.report = re.sub(r'\d+$', '', data.get('report', ''))
    if v.report in _INITIAL_REPORTS:
        # these reports do not require running madx first
        v.initialTwissParameters = _get_initial_twiss_params(data)
        v.numParticles = data.models.simulation.numberOfParticles
        v.particleFile = simulation_db.simulation_dir(SIM_TYPE, data.simulationId) \
            .join(data.report).join('ptc_particles.txt')
        res = template_common.render_jinja(SIM_TYPE, v, 'bunch.py')
        return res
    util = LatticeUtil(data, _SCHEMA)
    filename_map = _build_filename_map_from_util(util)
    report = data.get('report', '')
    code_var = _code_var(data.models.rpnVariables)
    # set once here; previously this was redundantly reassigned in the
    # twissReport and not-hasTwiss branches below
    v.twissOutputFilename = _TWISS_OUTPUT_FILE
    v.lattice = _generate_lattice(filename_map, util)
    v.variables = _generate_variables(code_var, data)
    v.useBeamline = util.select_beamline().name
    if report == 'twissReport':
        return template_common.render_jinja(SIM_TYPE, v, 'twiss.madx')
    _add_commands(data, util)
    v.commands = _generate_commands(filename_map, util)
    v.hasTwiss = bool(util.find_first_command(data, 'twiss'))
    return template_common.render_jinja(SIM_TYPE, v, 'parameters.madx')
def _lib_file_basenames(cls, data):
    """Return the library file basenames referenced by the simulation."""
    names = LatticeUtil(data, cls.schema()).iterate_models(
        lattice.InputFileIterator(cls),
    ).result
    source = data.models.bunchFile.sourceFile
    if source:
        names.append(
            cls.lib_file_name_with_model_field('bunchFile', 'sourceFile', source),
        )
    return names
def to_madx(self, data):
    """Copy overlapping beam command fields into the MAD-X beam command."""
    madx = super().to_madx(data)
    madx_beam = LatticeUtil.find_first_command(madx, 'beam')
    source_beam = LatticeUtil.find_first_command(data, 'beam')
    for field in source_beam:
        if field in madx_beam:
            madx_beam[field] = source_beam[field]
    od = LatticeUtil.find_first_command(data, 'distribution')
    #TODO(pjm): save dist in vars
    return madx
def from_madx(self, madx):
    """Build OPAL simulation data from parsed MAD-X models."""
    data = super().from_madx(madx)
    data.models.simulation.elementPosition = 'relative'
    madx_beam = LatticeUtil.find_first_command(madx, 'beam')
    LatticeUtil.find_first_command(data, 'option').version = 20000
    beam = LatticeUtil.find_first_command(data, 'beam')
    beam.particle = madx_beam.particle.upper()
    beam.pc = self.particle_energy.pc
    LatticeUtil.find_first_command(data, 'track').line = \
        data.models.simulation.visualizationBeamlineId
    self.__fixup_distribution(madx, data)
    return data
def to_madx(self, data):
    """Copy schema-known beam fields into the MAD-X beam command.

    The last non-empty energy-defining field (gamma/energy/pc) copied
    becomes the bunch beamDefinition.
    """
    madx = super().to_madx(data)
    madx_beam = LatticeUtil.find_first_command(madx, 'beam')
    source_beam = LatticeUtil.find_first_command(data, 'beam')
    for field in source_beam:
        if field in madx_beam and field in _SCHEMA.model.command_beam:
            madx_beam[field] = source_beam[field]
            if field in ('gamma', 'energy', 'pc') and madx_beam[field]:
                madx.models.bunch.beamDefinition = field
    od = LatticeUtil.find_first_command(data, 'distribution')
    #TODO(pjm): save dist in vars
    return madx
def _generate_parameters_file(data):
    """Render the input file for either a twiss report or a full run."""
    res, v = template_common.generate_parameters_file(data)
    util = LatticeUtil(data, _SCHEMA)
    v.lattice = _generate_lattice(util)
    v.use_beamline = util.select_beamline().name
    v.commands = _generate_commands(util)
    if data.get('report', '') == 'twissReport':
        template = 'twiss.in'
    else:
        template = 'parameters.in'
    return template_common.render_jinja(SIM_TYPE, v, template)
def field(self, model, field_schema, field):
    """Record the generated filename for an OutputFile field."""
    self.field_index += 1
    if field_schema[1] != 'OutputFile':
        return
    if LatticeUtil.is_command(model):
        # commands get a per-type counter suffix after the first occurrence
        suffix = self._command_file_extension(model)
        count = self.model_index[self.model_name]
        filename = '{}{}.{}.{}'.format(
            model._type,
            count if count > 1 else '',
            field,
            suffix,
        )
    else:
        filename = '{}.{}.sdds'.format(model.name, field)
    file_id = LatticeUtil.file_id(model._id, self.field_index)
    self.result[file_id] = filename
    self.result.keys_in_order.append(file_id)
def _compute_percent_complete(data, last_element, step):
    """Estimate run progress as a percentage (0-100).

    Prefers run_control n_steps when past the first step; otherwise walks
    the visualization beamline and uses the last processed element's index.
    """
    if step > 1:
        cmd = LatticeUtil.find_first_command(data, 'run_control')
        if cmd and cmd.n_steps:
            if code_variable.CodeVar.is_var_value(cmd.n_steps):
                n_steps = _code_var(data.models.rpnVariables).eval_var(
                    cmd.n_steps)[0]
            else:
                n_steps = int(cmd.n_steps)
            if n_steps and n_steps > 0:
                return min(100, step * 100 / n_steps)
    if not last_element:
        return 0
    elements = PKDict()
    for e in data.models.elements:
        elements[e._id] = e
    beamlines = PKDict()
    for b in data.models.beamlines:
        beamlines[b.id] = b
    # renamed from "id" to avoid shadowing the builtin
    beamline_id = data.models.simulation.visualizationBeamlineId
    beamline_map = PKDict()
    count = _walk_beamline(
        beamlines[beamline_id], 1, elements, beamlines, beamline_map)
    index = beamline_map.get(last_element, 0)
    return min(100, index * 100 / count)
def get_data_file(run_dir, model, frame, options=None, **kwargs):
    """Return a downloadable data file for a report.

    frame >= 0 selects a per-element animation output; 'animation' returns
    the elegant log; otherwise the bunch report output is returned. sdds
    outputs are converted to csv when options.suffix == 'csv'.
    """
    def _sdds(filename):
        # resolve and optionally convert an sdds output file
        path = run_dir.join(filename)
        assert path.check(file=True, exists=True), \
            '{}: not found'.format(path)
        if not options.suffix:
            return path
        if options.suffix != 'csv':
            raise AssertionError(
                f'invalid suffix={options.suffix} for download path={path}')
        # convert via the sdds toolkit
        out = elegant_common.subprocess_output(
            ['sddsprintout', '-columns', '-spreadsheet=csv', str(path)],
        )
        assert out, \
            f'{path}: invalid or empty output from sddsprintout'
        return PKDict(
            uri=path.purebasename + '.csv',
            content=out,
        )
    if frame >= 0:
        data = simulation_db.read_json(
            run_dir.join(template_common.INPUT_BASE_NAME))
        # ex. elementAnimation17-55
        i = LatticeUtil.file_id_from_output_model_name(model)
        return _sdds(_get_filename_for_element_id(i, data))
    if model == 'animation':
        return template_common.text_data_file(ELEGANT_LOG_FILE, run_dir)
    return _sdds(_report_output_filename('bunchReport'))
def _output_info(run_dir):
    # cache outputInfo to file, used later for report frames
    info_file = run_dir.join(_OUTPUT_INFO_FILE)
    if os.path.isfile(str(info_file)):
        try:
            res = simulation_db.read_json(info_file)
            # use the cache only if empty or written by the current version
            if not res or res[0].get('_version', '') == _OUTPUT_INFO_VERSION:
                return res
        except ValueError as e:
            # corrupt cache file; fall through and rebuild it
            pass
    data = simulation_db.read_json(
        run_dir.join(template_common.INPUT_BASE_NAME))
    files = _build_filename_map(data)
    res = []
    for k in files.keys_in_order:
        f = files[k]
        # only report output files that were actually produced
        if run_dir.join(f.filename).exists():
            res.append(_file_info(f.filename, run_dir, k))
    if LatticeUtil.find_first_command(data, _END_MATCH_COMMAND):
        # matching runs get a synthetic log-based animation entry
        res.insert(
            0,
            PKDict(
                modelKey='matchAnimation',
                filename='madx.log',
                isHistogram=False,
                plottableColumns=[],
                pageCount=0,
            ))
    if res:
        # stamp the cache version on the first entry
        res[0]['_version'] = _OUTPUT_INFO_VERSION
    simulation_db.write_json(info_file, res)
    return res
def write_files(self, data, source_path, dest_dir):
    """writes files for the simulation

    Returns:
        PKDict: structure of files written (debugging only)
    """
    class _G(_Generate):
        # resolve files relative to the original source location
        def _abspath(self, basename):
            # fixed: was missing "self", so an instance call would have
            # bound the instance to "basename"
            return source_path.new(basename=basename)

        def _input_file(self, model_name, field, filename):
            return filename

        def _lattice_filename(self, value):
            return value

    g = _G(data)
    g.sim()
    v = g.jinja_env
    r = PKDict(
        commands=dest_dir.join(source_path.basename),
        lattice=self._lattice_path(dest_dir, data),
    )
    pkio.write_text(r.commands, v.commands)
    pkio.write_text(r.lattice, v.rpn_variables + v.lattice)
    # symlink each unique input file from the source location
    for f in set(
        LatticeUtil(data, _SCHEMA).iterate_models(
            lattice.InputFileIterator(_SIM_DATA)).result,
    ):
        f = _SIM_DATA.lib_file_name_without_type(f)
        dest_dir.join(f).mksymlinkto(source_path.new(basename=f), absolute=False)
    f = g.filename_map
    r.output_files = [f[k] for k in f.keys_in_order]
    return r
def _convert(self, data):
    """Evaluate all rpn variables and expressions in data to numbers."""
    cv = code_var(data.models.rpnVariables)

    def _model(model, name):
        # convert one model's fields per its schema declarations
        schema = _SCHEMA.model[name]
        # keep last field/decl/value for the error log below
        k = x = v = None
        try:
            for k, x in schema.items():
                t = x[1]
                # fall back to the schema default when the field is missing
                v = model[k] if k in model else x[2]
                if t == 'RPNValue':
                    t = 'Float'
                    if cv.is_var_value(v):
                        model[k] = cv.eval_var_with_assert(v)
                        continue
                if t == 'Float':
                    model[k] = float(v) if v else 0.
                elif t == 'Integer':
                    model[k] = int(v) if v else 0
        except Exception as e:
            pkdlog('model={} field={} decl={} value={} exception={}',
                   name, k, x, v, e)
            raise

    for x in data.models.rpnVariables:
        x.value = cv.eval_var_with_assert(x.value)
    for k, v in data.models.items():
        if k in _SCHEMA.model:
            _model(v, k)
    for x in ('elements', 'commands'):
        for m in data.models[x]:
            _model(m, LatticeUtil.model_name_for_data(m))
    return data
def eval_code_var(data):
    """Evaluate all rpn variables and expressions in data to numbers."""
    # TODO(e-carlin): When #3111 is merged use the code in LibAdapterBase._convert
    # to do this work. It is copied from there.
    cv = code_var(data.models.rpnVariables)

    def _model(model, name):
        # convert one model's fields per its schema declarations
        schema = _SCHEMA.model[name]
        # keep last field/decl/value for the error log below
        k = x = v = None
        try:
            for k, x in schema.items():
                t = x[1]
                # fall back to the schema default when the field is missing
                v = model[k] if k in model else x[2]
                if t == 'RPNValue':
                    t = 'Float'
                    if cv.is_var_value(v):
                        model[k] = cv.eval_var_with_assert(v)
                        continue
                if t == 'Float':
                    model[k] = float(v) if v else 0.
                elif t == 'Integer':
                    model[k] = int(v) if v else 0
        except Exception as e:
            pkdlog('model={} field={} decl={} value={} exception={}',
                   name, k, x, v, e)
            raise

    for x in data.models.rpnVariables:
        x.value = cv.eval_var_with_assert(x.value)
    for k, v in data.models.items():
        if k in _SCHEMA.model:
            _model(v, k)
    for x in ('elements', 'commands'):
        for m in data.models[x]:
            _model(m, LatticeUtil.model_name_for_data(m))
def fixup_old_data(cls, data):
    """Backfill models added to the schema since the simulation was saved."""
    dm = data.models
    if 'twissEllipseReport1' not in dm:
        for i in range(1, 3):
            m = dm['twissEllipseReport{}'.format(i)] = PKDict()
            cls.update_model_defaults(m, 'twissEllipseReport')
            m.dim = 'x' if i == 1 else 'y'
    if 'bunchReport1' not in dm:
        b_params = [('x', 'px'), ('y', 'py'), ('x', 'y'), ('t', 'pt')]
        for (i, p) in enumerate(b_params):
            m = dm['bunchReport{}'.format(i + 1)] = PKDict()
            cls.update_model_defaults(m, 'bunchReport')
            m.x = b_params[i][0]
            m.y = b_params[i][1]
    if 'initialTwissParams' not in dm:
        # NOTE(review): both iterations assign the same 'initialTwissParams'
        # key, so only dim='y' survives — confirm whether per-dim models were
        # intended here
        for dim in ['x', 'y']:
            m = dm['initialTwissParams'] = PKDict()
            cls.update_model_defaults(m, 'initialTwissParams')
            m.dim = dim
    for container in ('commands', 'elements'):
        for m in dm[container]:
            cls.update_model_defaults(m, LatticeUtil.model_name_for_data(m))
    for m in dm.commands:
        # older twiss commands lack the file field; default to enabled
        if m._type == 'twiss' and 'file' not in m:
            m.file = "1"
def __update_filenames(self):
    """Normalize file fields and collect unique input-file descriptors.

    OutputFile fields are reset to '1' (enabled); InputFile fields are
    rewritten to their basename and reported once per lib filename.
    """
    res = []
    visited = set()
    for container in ('elements', 'commands'):
        for el in self.data.models[container]:
            model_name = self.util.model_name_for_data(el)
            el_schema = self.schema.model[model_name]
            for f in el:
                # skip fields unknown to the schema
                if f not in el_schema:
                    continue
                if el_schema[f][1] == 'OutputFile' and el[f]:
                    el[f] = '1'
                elif el_schema[f][1] == 'InputFile' and el[f]:
                    el[f] = self.sim_data.lib_file_name_without_type(
                        os.path.basename(el[f]))
                    filename = self.sim_data.lib_file_name_with_model_field(
                        model_name, f, el[f])
                    if filename not in visited:
                        res.append(
                            PKDict(
                                label=el.name,
                                type=LatticeUtil.type_for_data(el),
                                file_type='{}-{}'.format(model_name, f),
                                filename=el[f],
                                field=f,
                                lib_filename=filename,
                            ))
                        visited.add(filename)
    return res
def start(self, model):
    """Begin iterating a new model; track per-model-name occurrence counts."""
    self.field_index = 0
    name = LatticeUtil.model_name_for_data(model)
    self.model_name = name
    self.model_index[name] = self.model_index.get(name, 0) + 1
def _validate_data(data, schema):
    """Validate enums and numeric fields for all lattice elements."""
    enum_info = template_common.validate_models(data, schema)
    for element in data.models.elements:
        template_common.validate_model(
            element,
            schema.model[LatticeUtil.model_name_for_data(element)],
            enum_info,
        )
def field(self, model, field_schema, field):
    """Record output filenames; only element 'outfn' OutputFile fields."""
    self.field_index += 1
    # for now only interested in element outfn output files
    if field != 'outfn' or field_schema[1] != 'OutputFile':
        return
    file_id = LatticeUtil.file_id(model._id, self.field_index)
    self.result[file_id] = '{}.{}.h5'.format(model.name, field)
    self.result.keys_in_order.append(file_id)
def test_file_iterator():
    """InputFileIterator collects lib input filenames from an example sim."""
    from sirepo.template import lattice
    from sirepo.template.lattice import LatticeUtil
    from pykern.pkunit import pkeq
    data = _find_example('bunchComp - fourDipoleCSR')
    v = LatticeUtil(data, _elegant()._SCHEMA).iterate_models(
        lattice.InputFileIterator(_elegant()._SIM_DATA)).result
    # the example references a single WAKE input file
    pkeq(v, ['WAKE-inputfile.knsl45.liwake.sdds'])
def _validate_data(data, schema):
    """Validate enums/numbers and normalize bunch distribution types."""
    enum_info = template_common.validate_models(data, schema)
    _correct_halo_gaussian_distribution_type(data.models.bunch)
    for container in ['elements', 'commands']:
        for model in data.models[container]:
            template_common.validate_model(
                model,
                schema.model[LatticeUtil.model_name_for_data(model)],
                enum_info,
            )
            _correct_halo_gaussian_distribution_type(model)