def _run_jspec(data):
    """Generate JSPEC input from the simulation models and run the jspec binary.

    Args:
        data (dict): simulation data; ``data['models']['ring']`` is converted
            from elegant to MAD-X format before the run

    Returns:
        str: contents of the JSPEC log file
    """
    _elegant_to_madx(data['models']['ring'])
    # The generated parameters script defines jspec_file in our locals()
    # when exec'd — TODO confirm against the template that generates it.
    exec(pkio.read_text(template_common.PARAMETERS_PYTHON_FILE), locals(), locals())
    jspec_filename = template.JSPEC_INPUT_FILENAME
    pkio.write_text(jspec_filename, jspec_file)
    # check_call_with_signals propagates signals to the child process
    pksubprocess.check_call_with_signals(['jspec', jspec_filename], msg=pkdlog, output=template.JSPEC_LOG_FILE)
    return pkio.read_text(template.JSPEC_LOG_FILE)
def parse_elegant_log(run_dir):
    """Extract error text and the last-started element from an elegant log.

    Args:
        run_dir (py.path): directory containing ELEGANT_LOG_FILE

    Returns:
        tuple: (error text, name of last element started) — ``('', 0)`` when
            the log file does not exist
    """
    path = run_dir.join(ELEGANT_LOG_FILE)
    if not path.exists():
        return '', 0
    res = ''
    last_element = None
    text = pkio.read_text(str(path))
    # want_next_line: a short error line signals that the real message
    # follows on the next line
    want_next_line = False
    prev_line = ''
    prev_err = ''
    for line in text.split("\n"):
        # skip exact consecutive duplicate lines
        if line == prev_line:
            continue
        match = re.search('^Starting (\S+) at s\=', line)
        if match:
            name = match.group(1)
            # names like M1# appear to be internal markers, not elements
            if not re.search('^M\d+\#', name):
                last_element = name
        if want_next_line:
            res += line + "\n"
            want_next_line = False
        elif _is_ignore_error_text(line):
            pass
        elif _is_error_text(line):
            if len(line) < 10:
                # error text is too short to be the message itself;
                # capture the following line instead
                want_next_line = True
            else:
                # suppress repeated identical error lines
                if line != prev_err:
                    res += line + "\n"
                    prev_err = line
        prev_line = line
    return res, last_element
def _run_elegant():
    """Run elegant in the current directory and write a 2d histogram report.

    Reads ``in.json`` for the simulation data, exec's the generated
    parameters script (which defines ``lattice_file`` and ``elegant_file``
    in locals()), runs elegant, then histograms the selected bunch columns
    from the SDDS output into ``out.json``.
    """
    run_dir = os.getcwd()  # NOTE(review): unused local — candidate for removal
    with open('in.json') as f:
        data = json.load(f)
    # defines lattice_file and elegant_file in our locals()
    exec(pkio.read_text('elegant_parameters.py'), locals(), locals())
    pkio.write_text('elegant.lte', lattice_file)
    pkio.write_text('elegant.ele', elegant_file)
    call(['elegant', 'elegant.ele'])
    index = 0
    # sdds API reports success as 1; PrintErrors(1) prints and exits
    if sdds.sddsdata.InitializeInput(index, 'elegant.bun') != 1:
        sdds.sddsdata.PrintErrors(1)
    column_names = sdds.sddsdata.GetColumnNames(index)
    errorCode = sdds.sddsdata.ReadPage(index)
    if errorCode != 1:
        sdds.sddsdata.PrintErrors(1)
    bunch = data['models'][data['report']]
    x = sdds.sddsdata.GetColumn(index, column_names.index(bunch['x']))
    y = sdds.sddsdata.GetColumn(index, column_names.index(bunch['y']))
    nbins = int(bunch['histogramBins'])
    hist, edges = np.histogramdd([x, y], nbins)
    info = {
        # [min, max, number of bins] per axis
        'x_range': [float(edges[0][0]), float(edges[0][-1]), len(hist)],
        'y_range': [float(edges[1][0]), float(edges[1][-1]), len(hist[0])],
        'x_label': _FIELD_LABEL[bunch['x']],
        'y_label': _FIELD_LABEL[bunch['y']],
        'title': _plot_title(bunch),
        # transpose so rows correspond to y for the client heatmap
        'z_matrix': hist.T.tolist(),
    }
    with open('out.json', 'w') as outfile:
        json.dump(info, outfile)
def parse_error_log(run_dir):
    """Collect WARNING/ERROR messages from the run log.

    Blank lines terminate a message; continuation lines are appended to the
    current message. Also captures "Propagator: ... Exiting" messages.

    Args:
        run_dir (py.path): directory containing the run log

    Returns:
        dict: ``{'state': 'error', 'error': ...}`` when messages were found,
            else None
    """
    text = pkio.read_text(run_dir.join(template_common.RUN_LOG))
    errors = []
    current = ''
    for line in text.split("\n"):
        if not line:
            # a blank line ends the current message
            if current:
                errors.append(current)
                current = ''
            continue
        # the log sometimes misspells WARNING as WARRNING — match both
        m = re.match('\*\*\* (WARR?NING|ERROR) \*\*\*(.*)', line)
        if m:
            if not current:
                error_type = m.group(1)
                if error_type == 'WARRNING':
                    error_type = 'WARNING'
                current = '{}: '.format(error_type)
            extra = m.group(2)
            # skip file/line location noise on the marker line
            if re.search(r'\S', extra) and not re.search(r'File:|Line:|line \d+', extra):
                current += '\n' + extra
        elif current:
            # continuation of an in-progress message
            current += '\n' + line
        else:
            m = re.match('Propagator:*(.*?)Exiting', line)
            if m:
                errors.append(m.group(1))
    if len(errors):
        return {'state': 'error', 'error': '\n\n'.join(errors)}
    return None
def render_file(filename, j2_ctx, output=None, strict_undefined=False):
    """Render filename as a Jinja2 template with j2_ctx.

    Args:
        filename (str): path of the template file to read and render
        j2_ctx (dict): values to substitute into the Jinja2 template
        output (str): file name of output; if None, return str
        strict_undefined (bool): set `jinja2.StrictUndefined` if True

    Returns:
        str: rendered template
    """
    t = pkio.read_text(filename)
    kw = dict(
        trim_blocks=True,
        lstrip_blocks=True,
        keep_trailing_newline=True,
    )
    if strict_undefined:
        # fail loudly on undefined template variables instead of
        # silently rendering them as empty strings
        kw['undefined'] = jinja2.StrictUndefined
    je = jinja2.Environment(**kw)
    res = je.from_string(t).render(j2_ctx)
    if output:
        pkio.write_text(output, res)
    return res
def _run_srw():
    """Run an SRW calculation for the report selected in ``in.json``.

    Dispatches on ``data['report']`` to set the matching srwl_bl option
    flag and output filename, then runs ``calc_all`` and post-processes
    the output file.
    """
    run_dir = os.getcwd()  # NOTE(review): unused local — candidate for removal
    with open('in.json') as f:
        data = json.load(f)
    #TODO(pjm): need to properly escape data values, untrusted from client
    # this defines the get_srw_params() and get_beamline_optics() functions
    exec(pkio.read_text('srw_parameters.py'), locals(), locals())
    v = srwl_bl.srwl_uti_parse_options(get_srw_params(), use_sys_argv=False)
    source_type, mag = setup_source(v)
    op = None
    if data['report'] == 'intensityReport':
        v.ss = True
        outfile = v.ss_fn
    elif data['report'] == 'fluxReport':
        v.sm = True
        outfile = v.sm_fn
    elif data['report'] == 'powerDensityReport':
        v.pw = True
        outfile = v.pw_fn
    elif data['report'] == 'initialIntensityReport' or data['report'] == 'sourceIntensityReport':
        v.si = True
        outfile = v.si_fn
    elif data['report'] == 'mirrorReport':
        # mirror report is computed directly, no SRW run needed
        _process_output(_mirror_plot(data), data)
        return
    elif re.search('^watchpointReport', data['report']):
        # only watchpoint reports propagate through the beamline optics
        op = get_beamline_optics()
        v.ws = True
        outfile = v.ws_fni
    else:
        raise Exception('unknown report: {}'.format(data['report']))
    if isinstance(mag, srwlib.SRWLGsnBm):
        # gaussian beam sources carry no magnetic field approximation
        mag = None
    srwl_bl.SRWLBeamline(_name=v.name, _mag_approx=mag).calc_all(v, op)
    _process_output(outfile, data)
def run_background(cfg_dir):
    """Run a warp simulation in cfg_dir, stepping until N_steps is reached.

    Args:
        cfg_dir (str): directory containing the generated warp parameters
    """
    with pkio.save_chdir(cfg_dir):
        # defines N_steps and step() in our locals() — provided by the
        # generated warp parameters script
        exec(pkio.read_text('warp_parameters.py'), locals(), locals())
        n_stepped = 0
        # advance the simulation 10 steps at a time
        while n_stepped < N_steps:
            step(10)
            n_stepped = n_stepped + 10
def _run_hellweg(cfg_dir):
    """Run the hellweg BeamSolver in cfg_dir and write its output files.

    Args:
        cfg_dir (str): directory to run in
    """
    with pkio.save_chdir(cfg_dir):
        # defines input_file and ini_file in our locals()
        exec(pkio.read_text(template_common.PARAMETERS_PYTHON_FILE), locals(), locals())
        pkio.write_text(template.HELLWEG_INPUT_FILE, input_file)
        pkio.write_text(template.HELLWEG_INI_FILE, ini_file)
        solver = BeamSolver(template.HELLWEG_INI_FILE, template.HELLWEG_INPUT_FILE)
        solver.solve()
        solver.save_output(template.HELLWEG_SUMMARY_FILE)
        # binary dump used later for animation frames
        solver.dump_bin(template.HELLWEG_DUMP_FILE)
def __init__(self, controller, parent=None):
    """Build the SRW pane: styling, action buttons, params, and results."""
    super(View, self).__init__(parent)
    self._controller = controller
    self.global_params = {}
    # apply the pane's stylesheet from the packaged resource file
    css = pkio.read_text(pkresource.filename('srw_pane.css'))
    self.setStyleSheet(css)
    # assemble the three horizontal sections left-to-right
    layout = QtGui.QHBoxLayout()
    self._add_action_buttons(layout)
    self._add_param_vbox(layout)
    self._add_result_texts(layout)
    self.setLayout(layout)
def load_file(filename):
    """Read a file, making sure all keys and values are locale.

    Args:
        filename (str): file to read (Note: ``.yml`` will not be appended)

    Returns:
        object: `pkcollections.Dict` or list
    """
    text = pkio.read_text(filename)
    return load_str(text)
def secret_path_value(self, filename, gen_secret=None, visibility=None):
    """Return the secret stored at filename, generating it if necessary.

    Args:
        filename (str): secret file name
        gen_secret (callable): produces the secret bytes when the file
            does not exist yet
        visibility (str): passed through to db.secret_path

    Returns:
        tuple: (secret value, source path)

    Raises:
        AssertionError: when the secret is missing and gen_secret is None
    """
    from rsconf import db

    src = db.secret_path(self.hdb, filename, visibility=visibility)
    if src.check():
        return pkio.read_text(src), src
    # raise explicitly rather than via `assert`, which is stripped
    # under `python -O`; AssertionError preserved for existing callers
    if not gen_secret:
        raise AssertionError('unable to generate secret: path={}'.format(src))
    res = gen_secret()
    # persist the generated secret, then return it as text
    res = pkcompat.from_bytes(self._write_binary(src, res))
    return res, src
def run_background(cfg_dir):
    """Exec the generated parameters script in cfg_dir and record the result.

    Any exception is captured and written as an error result rather than
    propagating.

    Args:
        cfg_dir (str): directory to run in
    """
    res = {}
    try:
        with pkio.save_chdir(cfg_dir):
            exec(pkio.read_text(template_common.PARAMETERS_PYTHON_FILE), locals(), locals())
    except Exception as e:
        res = {
            'error': str(e),
        }
    # empty res on success; error dict on failure
    simulation_db.write_result(res)
def _run_srw():
    """Run the generated SRW main() and post-process the report output."""
    #TODO(pjm): need to properly escape data values, untrusted from client
    data = simulation_db.read_json(template_common.INPUT_BASE_NAME)
    if data['report'] == 'mirrorReport':
        #TODO(pjm): mirror report should use it's own jinja template
        _process_output(_mirror_plot(data), data)
        return
    # This defines the main() function:
    exec(pkio.read_text(template_common.PARAMETERS_PYTHON_FILE), locals(), locals())
    main()
    _process_output(get_filename_for_model(data['report']), data)
def _run_hellweg(cfg_dir):
    """Run the hellweg solver in cfg_dir and write its output files.

    Args:
        cfg_dir (str): directory to run in
    """
    with pkio.save_chdir(cfg_dir):
        # defines input_file and ini_file in our locals()
        exec(pkio.read_text(template_common.PARAMETERS_PYTHON_FILE), locals(), locals())
        pkio.write_text(template.HELLWEG_INPUT_FILE, input_file)
        pkio.write_text(template.HELLWEG_INI_FILE, ini_file)
        s = solver.BeamSolver(template.HELLWEG_INI_FILE, template.HELLWEG_INPUT_FILE)
        s.solve()
        s.save_output(template.HELLWEG_SUMMARY_FILE)
        # binary dump used later for animation frames
        s.dump_bin(template.HELLWEG_DUMP_FILE)
def _get_default_drift():
    """Parse srw.js to find the default drift propagation parameters.

    These can sometimes be missing in exported .py files (when
    distance = 0), but must be present in .json files.

    Returns:
        object: parsed default drift propagation parameters (a JSON array)
    """
    c = pkio.read_text(_JS_DIR.join('srw.js'))
    m = re.search(
        r'function defaultDriftPropagationParams.*?return\s*(\[[^\]]+\])',
        c,
        re.DOTALL,
    )
    # fail with a clear message if srw.js changes shape, rather than an
    # AttributeError on m.group below
    assert m, 'defaultDriftPropagationParams not found in srw.js'
    return pkjson.load_any(m.group(1))
def parse_mpi_log(run_dir):
    """Return the error message from the MPI run log, or None.

    Args:
        run_dir (py.path): directory that may contain mpi_run.out
    """
    log = run_dir.join('mpi_run.out')
    if not log.exists():
        return None
    # pull the final "...Error: message" line out of a traceback
    match = re.search(
        r'^Traceback .*?^\w*Error: (.*?)\n',
        pkio.read_text(log),
        re.MULTILINE | re.DOTALL,
    )
    return match.group(1) if match else None
def test_parse_madx_file():
    """Parse the example madx lattice and compare with the stored JSON."""
    from pykern import pkio, pkjson
    from pykern.pkunit import pkeq
    from sirepo.template import madx, madx_parser
    data_d = pkunit.data_dir()
    for name in ('particle_track', ):
        parsed = madx_parser.parse_file(
            pkio.read_text(data_d.join(f'{name}.madx')))
        madx._fixup_madx(parsed)
        # version varies between runs; not part of the comparison
        del parsed['version']
        pkeq(pkjson.load_any(data_d.join(f'{name}.json')), parsed)
def app_run():
    """Run a simulation and return its output JSON, or an error response."""
    data = _json_input()
    sid = simulation_db.parse_sid(data)
    err = _start_simulation(data).run_and_read()
    run_dir = simulation_db.simulation_run_dir(data)
    if not err:
        return pkio.read_text(run_dir.join('out{}'.format(simulation_db.JSON_SUFFIX)))
    pkdp('error: sid={}, dir={}, out={}', sid, run_dir, err)
    return flask.jsonify({
        'error': _error_text(err),
        'simulationId': sid,
    })
def background_percent_complete(report, run_dir, is_running):
    """Report background-job progress and frame/plot metadata.

    Args:
        report (str): report name; 'setupAnimation' is handled specially
        run_dir (py.path): simulation run directory
        is_running (bool): whether the simulation is still running

    Returns:
        PKDict: percentComplete plus frame/plot info when available
    """
    # NOTE: these closures read `files`, which is bound later in this
    # function's scope (only on the non-setupAnimation path).
    def _grid_columns():
        c = _grid_evolution_columns(run_dir)
        # drop comment columns (leading '#')
        return [x for x in c if x[0] != '#'] if c \
            else None

    def _plot_filenames():
        return [
            PKDict(
                time=_time_and_units(yt.load(str(f)).parameters['time']),
                filename=f.basename,
            ) for f in files
        ]

    def _plot_vars():
        names = []
        if len(files):
            io = simulation_db.read_json(
                run_dir.join(template_common.INPUT_BASE_NAME),
            ).models.IO_IOMain
            # plot_var_1, plot_var_2, ... until the first missing/empty key
            idx = 1
            while io.get(f'plot_var_{idx}', ''):
                n = io[f'plot_var_{idx}']
                if n != 'none':
                    names.append(n)
                idx += 1
        return names

    res = PKDict(
        percentComplete=0 if is_running else 100,
    )
    if report == 'setupAnimation':
        f = run_dir.join(_SIM_DATA.SETUP_PARAMS_SCHEMA_FILE)
        if f.exists():
            res.pkupdate(
                frameCount=1,
                flashSchema=pkjson.load_any(pkio.read_text(f))
            )
    else:
        _init_yt()
        files = _h5_file_list(run_dir)
        if is_running and len(files):
            # the last file may be unfinished if the simulation is running
            files.pop()
        res.pkupdate(
            frameCount=len(files),
            plotVars=_plot_vars(),
            plotFiles=_plot_filenames(),
            gridEvolutionColumns=_grid_columns(),
        )
    return res
def test_write_text():
    """Also tests read_text"""
    # relies on module-level pkunit/pkio imports
    d = pkunit.empty_work_dir()
    expect_res = d.join('anything')
    expect_content = 'something'
    res = pkio.write_text(str(expect_res), expect_content)
    assert expect_res == res, \
        'Verify result is file path as py.path.Local'
    # verify on-disk contents both with plain open() and with read_text()
    with open(str(expect_res)) as f:
        assert expect_content == f.read(), \
            'When write_text is called, it should write "something"'
    assert expect_content == pkio.read_text(str(expect_res)), \
        'When read_text, it should read "something"'
def test_from_elegant_to_madx_and_back():
    """Round-trip examples elegant -> MAD-X -> elegant against stored files."""
    from pykern import pkio
    from pykern.pkunit import pkeq
    from sirepo.template import elegant, madx, madx_converter, madx_parser
    with pkunit.save_chdir_work():
        for name in ('SPEAR3', 'Compact Storage Ring', 'Los Alamos Proton Storage Ring'):
            data = _example_data(name)
            # elegant -> MAD-X
            mad = madx_converter.to_madx(elegant.SIM_TYPE, data)
            outfile = name.lower().replace(' ', '-') + '.madx'
            actual = madx.python_source_for_model(mad, None)
            pkio.write_text(outfile, actual)
            expect = pkio.read_text(pkunit.data_dir().join(outfile))
            pkeq(expect, actual)
            # MAD-X -> elegant
            data = madx_parser.parse_file(actual)
            lattice = madx_converter.from_madx(elegant.SIM_TYPE, data)
            outfile = name.lower().replace(' ', '-') + '.lte'
            actual = elegant.python_source_for_model(lattice, None)
            pkio.write_text(outfile, actual)
            expect = pkio.read_text(pkunit.data_dir().join(outfile))
            pkeq(expect, actual)
def test_render_resource():
    """render() should return the rendered template and only write when asked."""
    t1 = pkunit.import_module_from_data_dir('t1')
    with pkunit.save_chdir_work():
        out = 'out'
        expect = '\n!v1!\n'
        assert expect == t1.render(None), \
            'render_resource should return rendered template'
        # no output arg: nothing should appear in the work dir
        assert not glob.glob('*'), \
            'render_resource should not create any files'
        assert expect == t1.render(out), \
            'render_resource should return string even when writing to file'
        assert expect == pkio.read_text(out), \
            'With out, render_resource should write file'
def read_status(run_dir):
    """Read status from simulation dir

    Args:
        run_dir (py.path): where to read
    """
    try:
        return pkio.read_text(run_dir.join(_STATUS_FILE))
    except IOError as e:
        if not pkio.exception_is_not_found(e):
            return 'error'
        # simulation may never have been run
        return 'stopped'
def app_run():
    """Start a simulation, wait for completion, and return the result JSON."""
    data = _json_input()
    sid = simulation_db.parse_sid(data)
    err = _start_simulation(data).run_and_read()
    run_dir = simulation_db.simulation_run_dir(data)
    if err:
        pkdp('error: sid={}, dir={}, out={}', sid, run_dir, err)
        return flask.jsonify({
            'error': _error_text(err),
            'simulationId': sid,
        })
    out_name = 'out{}'.format(simulation_db.JSON_SUFFIX)
    return pkio.read_text(run_dir.join(out_name))
def test_importer():
    """Import each SRW example script and compare the parsed result JSON."""
    from sirepo.template.srw_importer import import_python
    from pykern import pkio
    from pykern import pkresource
    from pykern import pkunit
    from pykern.pkdebug import pkdc, pkdp
    import glob
    import py
    _TESTS = {
        # Values are optional arguments:
        'amx': ('amx', None),
        'amx_bl2': ('amx', '--op_BL=2'),
        'amx_bl3': ('amx', '--op_BL=3'),
        'amx_bl4': ('amx', '--op_BL=4'),
        'chx': ('chx', None),
        'chx_fiber': ('chx_fiber', None),
        'exported_chx': ('exported_chx', None),
        'exported_gaussian_beam': ('exported_gaussian_beam', None),
        'exported_undulator_radiation': ('exported_undulator_radiation', None),
        'lcls_simplified': ('lcls_simplified', None),
        'lcls_sxr': ('lcls_sxr', None),
        'sample_from_image': ('sample_from_image', None),
        'smi_es1_bump_norm': ('smi', '--beamline ES1 --bump --BMmode Norm'),
        'smi_es1_nobump': ('smi', '--beamline ES1'),
        'smi_es2_bump_lowdiv': ('smi', '--beamline ES2 --bump --BMmode LowDiv'),
        'smi_es2_bump_norm': ('smi', '--beamline ES2 --bump --BMmode Norm'),
        'srx': ('srx', None),
        'srx_bl2': ('srx', '--op_BL=2'),
        'srx_bl3': ('srx', '--op_BL=3'),
        'srx_bl4': ('srx', '--op_BL=4'),
    }
    dat_dir = py.path.local(pkresource.filename('template/srw/', import_python))
    with pkunit.save_chdir_work():
        work_dir = py.path.local('.')
        # copy the mirror/sample data files the scripts reference
        for f in glob.glob(str(dat_dir.join('mirror_*d.dat'))):
            py.path.local(f).copy(work_dir)
        py.path.local(str(dat_dir.join('sample.tif'))).copy(work_dir)
        for b in sorted(_TESTS.keys()):
            base_py = '{}.py'.format(_TESTS[b][0])
            code = pkio.read_text(pkunit.data_dir().join(base_py))
            actual = import_python(
                code,
                tmp_dir=str(work_dir),
                lib_dir=str(work_dir),
                user_filename=r'c:\anything\{}.anysuffix'.format(_TESTS[b][0]),
                arguments=_TESTS[b][1],
            )
            # version differs between runs; not part of the comparison
            actual['version'] = 'IGNORE-VALUE'
            pkunit.assert_object_with_json(b, actual)
def run_background(cfg_dir):
    """Build and launch the SRW multi-particle background run.

    Appends an execution stanza (and, for MPI runs, a SIGTERM handler) to
    the generated srw_parameters.py, writes it as run_background.py, and
    runs it as a child process, terminating the child on failure.

    Args:
        cfg_dir (str): directory to run in
    """
    with pkio.save_chdir(cfg_dir):
        fn = 'run_background.py'
        cmd = [sys.executable or 'python', fn]
        script = pkio.read_text('srw_parameters.py')
        p = dict(pkcollections.map_items(cfg))
        if cfg.slave_processes > 1:
            cmd[0:0] = [
                'mpiexec',
                '-n',
                # SRW includes a master process so 2 really needs 3 processes
                str(cfg.slave_processes + 1),
            ]
            # slaves must abort the whole MPI job when terminated
            script += '''
from mpi4py import MPI
if MPI.COMM_WORLD.Get_rank():
    import signal
    signal.signal(signal.SIGTERM, lambda x, y: MPI.COMM_WORLD.Abort(1))
'''
        else:
            # In interactive (dev) mode, output as frequently as possible
            p['particles_per_slave'] = 1
        script += '''
import srwl_bl
v = srwl_bl.srwl_uti_parse_options(get_srw_params(), use_sys_argv=False)
source_type, mag = setup_source(v)
v.wm = True
v.wm_nm = {total_particles}
v.wm_na = {particles_per_slave}
# Number of "iterations" per save is best set to num processes
v.wm_ns = {slave_processes}
op = get_beamline_optics()
srwl_bl.SRWLBeamline(_name=v.name).calc_all(v, op)
'''.format(**p)
        pkio.write_text(fn, script)
        # NOTE(review): `p` is rebound from the config dict to the Popen
        # handle here; if Popen itself raises, the finally clause would
        # call terminate() on the dict — confirm intended.
        try:
            p = subprocess.Popen(
                cmd,
                stdin=open(os.devnull),
                stdout=open('run_background.out', 'w'),
                stderr=subprocess.STDOUT,
            )
            signal.signal(signal.SIGTERM, lambda x, y: p.terminate())
            rc = p.wait()
            if rc != 0:
                # child already exited; clear p so finally doesn't re-kill
                p = None
                raise RuntimeError('child terminated: retcode={}'.format(rc))
        finally:
            if not p is None:
                p.terminate()
def test_sirepo_parser():
    """Verify the importer accepts a known SRWLIB beamline script."""
    with pkunit.save_chdir_work():
        for name in ['SRWLIB_VirtBL_LCLS_SXR_01']:
            base_py = '{}.py'.format(name)
            source = pkio.read_text(pkunit.data_dir().join(base_py))
            error, actual = import_python(
                source,
                tmp_dir='.',
                lib_dir='.',
                user_filename=r'c:\x\{}.y'.format('SRWLIB_VirtBL_LCLS_SXR_01'),
            )
            assert not error, \
                '{}: should be valid input'.format(base_py)
            pkunit.assert_object_with_json(name, actual)
def _read_twiss_header(run_dir):
    """Extract labeled twiss summary values from the zgoubi twiss file.

    Args:
        run_dir (str): simulation run directory

    Returns:
        list: [name, label, value] rows for each recognized summary field
    """
    path = py.path.local(run_dir).join(_ZGOUBI_TWISS_FILE)
    rows = []
    for line in pkio.read_text(path).split('\n'):
        # header variables are separated by '@ ' markers
        for chunk in line.split('@ '):
            fields = chunk.split()
            if not fields or fields[0] not in _TWISS_SUMMARY_LABELS:
                continue
            value = fields[2]
            # values with two consecutive letters are non-numeric; keep as str
            if not re.search(r'[a-z]{2}', value, re.IGNORECASE):
                value = float(value)
            rows.append([fields[0], _TWISS_SUMMARY_LABELS[fields[0]], value])
    return rows
def test_generate_python():
    """Compare generated OPAL python source against stored expectations."""
    from pykern import pkio
    from pykern.pkunit import pkeq
    from sirepo.template import opal
    with pkunit.save_chdir_work():
        for name in ('CSR Bend Drift', 'CTF3 RF Photoinjector'):
            data = _example_data(name)
            data['report'] = 'animation'
            generated = opal.python_source_for_model(data, None)
            outfile = name.lower().replace(' ', '-') + '.txt'
            # keep a copy in the work dir for diffing on failure
            pkio.write_text(outfile, generated)
            pkeq(pkio.read_text(pkunit.data_dir().join(outfile)), generated)
def test_parse_madx_file():
    """Parse example madx files and compare against stored JSON."""
    from pykern import pkio, pkjson
    from pykern.pkunit import pkeq
    from sirepo.template import madx, madx_parser
    with pkunit.save_chdir_work():
        for name in ('particle_track', 'alba'):
            src = pkunit.data_dir().join(f'{name}.madx')
            parsed = madx_parser.parse_file(pkio.read_text(src))
            # version varies between runs; not part of the comparison
            del parsed['version']
            outfile = f'{name}.json'
            pkjson.dump_pretty(parsed, outfile)
            pkeq(pkjson.load_any(pkunit.data_dir().join(outfile)), parsed)
def test_generate_python():
    """Compare generated shadow parameter files against stored expectations."""
    from pykern import pkio
    from pykern.pkunit import pkeq
    from sirepo.template import shadow
    with pkunit.save_chdir_work():
        for name in ('Complete Beamline', 'Wiggler'):
            data = _example_data(name)
            # report on the last beamline element's watchpoint
            last_id = data.models.beamline[-1].id
            data['report'] = 'watchpointReport{}'.format(last_id)
            generated = shadow._generate_parameters_file(data)
            outfile = data.models.simulation.simulationId + '.txt'
            pkio.write_text(outfile, generated)
            pkeq(pkio.read_text(pkunit.data_dir().join(outfile)), generated)
def test_beam_solver():
    """Ensure BeamSolver interface solves and produces output"""
    from pykern import pkio
    from pykern.pkunit import pkeq
    from rslinac.solver import BeamSolver
    f = _files()
    with pkunit.save_chdir_work():
        # the solver reads Solenoid.txt from the working directory
        pkio.write_text('Solenoid.txt', pkio.read_text(pkunit.data_dir().join('Solenoid.txt')))
        solver = BeamSolver(f['ini'], f['input'])
        solver.solve()
        solver.save_output(f['output'])
        assert f['output'].exists()
        v = solver.get_structure_parameters(1)
        assert v[2] == 0.0006
        solver.dump_bin('all-data.bin')
        # compare generated files with stored expectations
        for outfile in ('PARSED.TXT', 'test1.pid'):
            expect = pkio.read_text(pkunit.data_dir().join(outfile))
            actual = pkio.read_text(pkunit.work_dir().join(outfile))
            pkeq(expect, actual)
        # reloading the binary dump must restore the same parameters
        solver.load_bin('all-data.bin')
        v = solver.get_structure_parameters(1)
        assert v[2] == 0.0006
def test_init_tree():
    """Normal case"""
    from pykern import pkio
    from pykern.pkcli import projex
    from pykern import pkunit
    with pkunit.save_chdir_work():
        name = 'proj1'
        pkio.mkdir_parent(name)
        with pkio.save_chdir(name):
            # init_tree expects to run inside a git repository
            subprocess.check_call(['git', 'init', '.'])
            subprocess.check_call(
                ['git', 'config', 'user.email', '*****@*****.**'])
            subprocess.check_call(['git', 'config', 'user.name', 'pykern'])
            projex.init_tree(
                name=name,
                author='zauthor',
                author_email='*****@*****.**',
                description='some python project',
                license='MIT',
                url='http://example.com',
            )
            pkio.write_text('tests/test_1.py', 'def test_1(): pass')
            # each generated file must exist and match its expected pattern
            for expect_fn, expect_re in (
                ('.gitignore', 'MANIFEST.in'),
                ('LICENSE', 'The MIT License'),
                ('README.md', 'licenses/MIT'),
                ('docs/_static/.gitignore', ''),
                ('docs/_templates/.gitignore', ''),
                ('docs/index.rst', name),
                ('setup.py', "author='zauthor'"),
                ('setup.py', r':copyright:.*zauthor\.'),
                ('tests/.gitignore', '_work'),
                (name + '/__init__.py', ''),
                (name + '/package_data/.gitignore', ''),
                (
                    '{}/{}_console.py'.format(name, name),
                    r"main\('{}'\)".format(name),
                ),
            ):
                assert re.search(expect_re, pkio.read_text(expect_fn)), \
                    '{} should exist and match "{}"'.format(expect_fn, expect_re)
            subprocess.check_call(['git', 'commit', '-m', 'initial'])
            # Do not install from PyPI
            pykern_path = py.path.local(__file__).dirpath().dirpath().dirpath()
            # pykern must be installed for setup.py to be able to be called
            subprocess.check_call(['pip', 'install', '-e', str(pykern_path)])
            subprocess.check_call(['python', 'setup.py', 'test'])
            subprocess.check_call(['python', 'setup.py', 'tox'])
def test_importer(import_req):
    """Import each .in example; compare generated source (or error) to stored .txt."""
    from pykern.pkunit import pkeq
    from sirepo.template import opal
    from sirepo.template import opal_parser
    import re
    with pkunit.save_chdir_work():
        for fn in pkio.sorted_glob(pkunit.data_dir().join('*.in')):
            error = None
            try:
                data, files = opal_parser.parse_file(pkio.read_text(fn), filename=fn)
            except Exception as e:
                pkdlog(pkdexc())
                error = str(e)
            if error:
                # parse failures are expected for some inputs; the error
                # text itself is compared against the stored expectation
                actual = error
            else:
                data['report'] = 'animation'
                actual = opal.python_source_for_model(data, None)
            outfile = re.sub(r'\.in$', '.txt', fn.basename)
            pkio.write_text(outfile, actual)
            expect = pkio.read_text(pkunit.data_dir().join(outfile))
            pkeq(expect, actual)
def run(cfg_dir):
    """Run a bunch or twiss report in cfg_dir and save the report data.

    Args:
        cfg_dir (str): directory to run in

    Raises:
        RuntimeError: for unsupported report types
    """
    data = simulation_db.read_json(template_common.INPUT_BASE_NAME)
    report = data['report']
    if 'bunchReport' in report or report == 'twissReport' or report == 'twissReport2':
        try:
            with pkio.save_chdir(cfg_dir):
                exec(pkio.read_text(template_common.PARAMETERS_PYTHON_FILE), locals(), locals())
                template.save_report_data(data, py.path.local(cfg_dir))
        except Exception as e:
            # prefer a parsed error from the log; fall back to the exception
            res = template.parse_error_log(py.path.local(cfg_dir)) or {
                'error': str(e),
            }
            simulation_db.write_result(res)
    else:
        raise RuntimeError('unknown report: {}'.format(report))
def test_from_elegant_to_madx_and_back():
    """Round-trip examples elegant -> MAD-X -> elegant against stored files."""
    from pykern import pkio
    from pykern.pkunit import pkeq
    from sirepo.template import elegant
    from sirepo.template.elegant import ElegantMadxConverter
    with pkunit.save_chdir_work() as d:
        for name in ('SPEAR3', 'Compact Storage Ring', 'Los Alamos Proton Storage Ring'):
            data = _example_data(name)
            # elegant -> MAD-X
            actual = ElegantMadxConverter().to_madx_text(data)
            outfile = name.lower().replace(' ', '-') + '.madx'
            pkio.write_text(outfile, actual)
            e = pkunit.data_dir().join(outfile)
            expect = pkio.read_text(e)
            pkeq(expect, actual, 'diff {} {}', e, d.join(outfile))
            # MAD-X -> elegant
            lattice = ElegantMadxConverter().from_madx_text(actual)
            outfile = name.lower().replace(' ', '-') + '.lte'
            actual = elegant.python_source_for_model(lattice, None)
            pkio.write_text(outfile, actual)
            e = pkunit.data_dir().join(outfile)
            expect = pkio.read_text(e)
            pkeq(expect, actual, 'diff {} {}', e, d.join(outfile))
def post_execution_processing(success_exit=True, is_parallel=False, run_dir=None, **kwargs):
    """Return an error message parsed from the MPI log after a failed run.

    Returns None on success or when no message can be extracted.
    """
    # TODO(e-carlin): share with synergia (and possibly radia)
    if success_exit:
        return None
    log = run_dir.join('mpi_run.out')
    if not log.exists():
        return None
    match = re.search(
        r'^ Error message is (.*?)\n',
        pkio.read_text(log),
        re.MULTILINE | re.DOTALL,
    )
    return match.group(1) if match else None
def run_background(cfg_dir):
    """Run the simulation in cfg_dir, with MPI for supported distributions.

    On failure of an MPI run, attempts to extract a cleaner error message
    from mpi_run.out before writing the result.

    Args:
        cfg_dir (str): directory to run in
    """
    res = {}
    data = simulation_db.read_json(template_common.INPUT_BASE_NAME)
    distribution = data['models']['bunch']['distribution']
    run_with_mpi = distribution == 'lattice' or distribution == 'file'
    try:
        with pkio.save_chdir(cfg_dir):
            if run_with_mpi:
                mpi.run_script(pkio.read_text(template_common.PARAMETERS_PYTHON_FILE))
            else:
                #TODO(pjm): MPI doesn't work with rsbeams distributions yet
                exec(pkio.read_text(template_common.PARAMETERS_PYTHON_FILE), locals(), locals())
    except Exception as e:
        res = {
            'error': str(e),
        }
    if run_with_mpi and 'error' in res:
        # replace the generic exception with the traceback's error line
        text = pkio.read_text('mpi_run.out')
        m = re.search(r'^Traceback .*?^\w*Error: (.*?)\n\n', text, re.MULTILINE|re.DOTALL)
        if m:
            res['error'] = m.group(1)
    # remove output file - write_result() will not overwrite an existing error output
    pkio.unchecked_remove(simulation_db.json_filename(template_common.OUTPUT_BASE_NAME))
    simulation_db.write_result(res)
def test_generate_python():
    """Regenerate synergia parameter files named inside each expectation file."""
    from pykern import pkio
    from pykern import pkunit
    from sirepo.template import synergia
    import re
    with pkunit.save_chdir_work() as work_d:
        for data_f in pkio.sorted_glob(pkunit.data_dir().join('*.txt')):
            expect = pkio.read_text(data_f)
            # the example name is embedded as a '# name' comment line
            name_m = re.search(r'^#\s*(.*\S)\s*$', expect, flags=re.MULTILINE)
            assert name_m
            actual = synergia._generate_parameters_file(_example_data(name_m.group(1)))
            pkio.write_text(data_f.basename, actual)
            pkunit.pkeq(expect, actual, 'diff {} {}', data_f, work_d.join(data_f.basename))
def assert_object_with_json(basename, actual):
    """Converts actual to JSON and compares with data_dir/basename.json

    Reads data_dir/basename.json and compares with actual converted to
    json. Trailing newline is managed properly. The keys are sorted and
    indentation is 4. actual written to work_dir.

    Args:
        basename (str): file to be found in data_dir with json suffix
        actual (object): to be serialized as json
    """
    actual = pkdpretty(actual)
    fn = '{}.json'.format(basename)
    pkio.write_text(work_dir().join(fn), actual)
    expect = pkio.read_text(data_dir().join(fn))
    # include basename in the failure message (consistent with the
    # other variant of this helper)
    assert expect == actual, \
        '{}: unexpected result'.format(basename)
def _run_elegant(bunch_report=False, with_mpi=False):
    """Write elegant input files and run elegant (or Pelegant under MPI).

    Args:
        bunch_report (bool): skip the run for sdds_beam-based bunches
        with_mpi (bool): allow MPI execution when configured
    """
    # defines lattice_file, elegant_file and execution_mode in our locals()
    exec(pkio.read_text(template_common.PARAMETERS_PYTHON_FILE), locals(), locals())
    # sdds_beam bunches are read from file; nothing to simulate here
    if bunch_report and re.search('\&sdds_beam\s', elegant_file):
        return
    pkio.write_text('elegant.lte', lattice_file)
    ele = 'elegant.ele'
    pkio.write_text(ele, elegant_file)
    kwargs = {
        'output': ELEGANT_LOG_FILE,
        'env': elegant_common.subprocess_env(),
    }
    #TODO(robnagler) Need to handle this specially, b/c different binary
    if execution_mode == 'parallel' and with_mpi and mpi.cfg.cores > 1:
        return mpi.run_program(['Pelegant', ele], **kwargs)
    pksubprocess.check_call_with_signals(['elegant', ele], msg=pkdp, **kwargs)
def test_write_text():
    """Also tests read_text"""
    from pykern import pkunit
    from pykern import pkio
    work_d = pkunit.empty_work_dir()
    path = work_d.join('anything')
    content = 'something'
    written = pkio.write_text(str(path), content)
    assert path == written, \
        'Verify result is file path as py.path.Local'
    # confirm the bytes on disk both via plain open() and via read_text()
    with open(str(path)) as fh:
        assert content == fh.read(), \
            'When write_text is called, it should write "something"'
    assert content == pkio.read_text(str(path)), \
        'When read_text, it should read "something"'
def _generate_source(fc, sim, name):
    """Fetch pythonSource for a simulation and compare with the stored file.

    Args:
        fc: test flask client
        sim (dict): simulation data providing id and type
        name (str): used (sanitized) as the expectation file's basename
    """
    from pykern.pkunit import pkeq
    resp = fc.sr_get(
        'pythonSource',
        {
            'simulation_id': sim['models']['simulation']['simulationId'],
            'simulation_type': sim['simulationType'],
        },
    )
    # sanitize the name into a safe lowercase filename
    filename = '{}.py'.format(name.lower())
    filename = re.sub(r'\s', '-', filename)
    filename = re.sub(r'[^a-z0-9\-\.]', '', filename)
    with open(str(pkunit.work_dir().join(filename)), 'wb') as f:
        f.write(resp.data)
    expect = pkio.read_text(pkunit.data_dir().join(filename))
    # NOTE(review): expect is str while resp.data is typically bytes on
    # Python 3 — confirm pkeq/pkcompat handles the comparison as intended
    pkeq(expect, resp.data)
def post_execution_processing(success_exit=True, is_parallel=False, run_dir=None, **kwargs):
    """Return an error message for a failed run, or None on success.

    Serial runs are parsed from the synergia log; parallel runs from the
    MPI log's traceback.
    """
    if success_exit:
        return None
    if not is_parallel:
        return _parse_synergia_log(run_dir)
    log = run_dir.join('mpi_run.out')
    if not log.exists():
        return None
    match = re.search(
        r'^Traceback .*?^\w*Error: (.*?)\n',
        pkio.read_text(log),
        re.MULTILINE | re.DOTALL,
    )
    return match.group(1) if match else None
def test_init_tree():
    """Normal case"""
    with pkunit.save_chdir_work():
        name = 'proj1'
        pkio.mkdir_parent(name)
        with pkio.save_chdir(name):
            # init_tree expects to run inside a git repository
            subprocess.check_call(['git', 'init', '.'])
            subprocess.check_call(['git', 'config', 'user.email', '*****@*****.**'])
            subprocess.check_call(['git', 'config', 'user.name', 'pykern'])
            projex.init_tree(
                name=name,
                author='zauthor',
                author_email='*****@*****.**',
                description='some python project',
                license='MIT',
                url='http://example.com',
            )
            pkio.write_text('tests/test_1.py', 'def test_1(): pass')
            # each generated file must exist and match its expected pattern
            for expect_fn, expect_re in (
                ('.gitignore', 'MANIFEST.in'),
                ('LICENSE', 'The MIT License'),
                ('README.md', 'licenses/MIT'),
                ('docs/_static/.gitignore', ''),
                ('docs/_templates/.gitignore', ''),
                ('docs/index.rst', name),
                ('requirements.txt', 'pykern'),
                ('setup.py', "author='zauthor'"),
                ('setup.py', r':copyright:.*zauthor\.'),
                ('tests/.gitignore', '_work'),
                (name + '/__init__.py', ''),
                (name + '/package_data/.gitignore', ''),
                (
                    '{}/{}_console.py'.format(name, name),
                    r"main\('{}'\)".format(name),
                ),
            ):
                assert re.search(expect_re, pkio.read_text(expect_fn)), \
                    '{} should exist and match "{}"'.format(expect_fn, expect_re)
            subprocess.check_call(['git', 'commit', '-m', 'initial'])
            # Do not install from PyPI
            pkio.write_text(
                'requirements.txt',
                '-e ' + str(py.path.local(__file__).dirpath().dirpath().dirpath()),
            );
            subprocess.check_call(['python', 'setup.py', 'test'])
            subprocess.check_call(['python', 'setup.py', 'tox'])
def assert_object_with_json(basename, actual):
    """Serialize ``actual`` to JSON and compare with data_dir/basename.json.

    The serialization uses sorted keys and indent 4 with the trailing
    newline managed; the result is also written to work_dir for inspection.

    Args:
        basename (str): file to be found in data_dir with json suffix
        actual (object): to be serialized as json
    """
    serialized = pkdpretty(actual)
    json_name = '{}.json'.format(basename)
    pkio.write_text(work_dir().join(json_name), serialized)
    expect = pkio.read_text(data_dir().join(json_name))
    assert expect == serialized, \
        '{}: unexpected result'.format(basename)
def test_init(capsys):
    """pkdebug init() should route output to a file or to stderr with pid/time."""
    from pykern import pkunit
    from pykern import pkio
    from pykern.pkdebug import pkdp, init
    log_file = pkunit.empty_work_dir().join('f1')
    init(output=log_file)
    pkdp('init1')
    out, err = capsys.readouterr()
    assert '' == err, \
        'When output is a file name, nothing goes to err'
    assert 'init1\n' in pkio.read_text(log_file), \
        'File output should contain msg'
    # re-init to stderr with pid/time prefixes enabled
    init(output=None, want_pid_time=True)
    pkdp('init2')
    out, err = capsys.readouterr()
    assert re.search(r'\w{3} .\d \d\d:\d\d:\d\d +\d+ +\d+ ', err), \
        'When output has time, matches regex'
def _run_srw():
    """Exec the generated SRW script's main() and write the result."""
    #TODO(pjm): need to properly escape data values, untrusted from client
    data = simulation_db.read_json(template_common.INPUT_BASE_NAME)
    # the generated script defines main() (and, for imports, parsed_data)
    exec(pkio.read_text(template_common.PARAMETERS_PYTHON_FILE), locals(), locals())
    # call via locals() because exec'd names are not visible as bare names
    locals()['main']()
    # special case for importing python code
    if data['report'] == 'backgroundImport':
        sim_id = data['models']['simulation']['simulationId']
        # parsed_data is created by the exec'd script — TODO confirm
        parsed_data['models']['simulation']['simulationId'] = sim_id
        #TODO(pjm): assumes the parent directory contains the simulation data,
        # can't call simulation_db.save_simulation_json() because user isn't set for pkcli commands
        simulation_db.write_json('../{}'.format(simulation_db.SIMULATION_DATA_FILE), parsed_data)
        simulation_db.write_result({
            'simulationId': sim_id,
        })
    else:
        simulation_db.write_result(extract_report_data(get_filename_for_model(data['report']), data))
def test_importer():
    """Import each .ele/.lte example; compare generated text to stored .txt."""
    from pykern import pkcollections
    from pykern import pkio
    from pykern.pkunit import pkeq
    from sirepo.template import elegant
    with pkunit.save_chdir_work():
        for fn in pkio.sorted_glob(pkunit.data_dir().join('*')):
            if not pkio.has_file_extension(fn, ('ele', 'lte')) \
                or fn.basename.endswith('ele.lte'):
                continue
            error = None
            try:
                data = elegant.import_file(FlaskRequest(fn))
            except Exception as e:
                pkdlog(pkdexc())
                # str(e), not e.message: Exception.message does not exist
                # on Python 3 (matches the opal importer test)
                error = str(e)
            if error:
                # import failures are expected for some inputs; the error
                # text itself is compared against the stored expectation
                actual = error
            else:
                if pkio.has_file_extension(fn, 'lte'):
                    data['models']['commands'] = []
                    actual = '{}{}'.format(
                        elegant._generate_variables(data),
                        elegant.generate_lattice(
                            data,
                            elegant._build_filename_map(data),
                            elegant._build_beamline_map(data),
                            pkcollections.Dict(),
                        ),
                    )
                else:
                    # .ele files are re-imported with their companion .lte
                    data2 = elegant.import_file(FlaskRequest('{}.lte'.format(fn)), test_data=data)
                    actual = elegant._generate_commands(
                        data2,
                        elegant._build_filename_map(data2),
                        elegant._build_beamline_map(data2),
                        pkcollections.Dict(),
                    )
            outfile = fn.basename + '.txt'
            pkio.write_text(outfile, actual)
            expect = pkio.read_text(pkunit.data_dir().join(outfile))
            #TODO(pjm): this takes too long if there are a lot of diffs
            #assert expect == actual
            pkeq(expect, actual)
def test_init_rs_tree():
    """Normal case"""
    with pkunit.save_chdir_work():
        name = 'rs_proj1'
        pkio.mkdir_parent(name)
        with pkio.save_chdir(name):
            # init_rs_tree expects to run inside a git repository
            subprocess.check_call(['git', 'init', '.'])
            subprocess.check_call(['git', 'config', 'user.email', '*****@*****.**'])
            subprocess.check_call(['git', 'config', 'user.name', 'pykern'])
            projex.init_rs_tree(
                description='some radiasoftee project',
            )
            checks = (
                ('LICENSE', 'Apache License'),
                ('setup.py', "author='RadiaSoft LLC'"),
            )
            for expect_fn, expect_re in checks:
                assert re.search(expect_re, pkio.read_text(expect_fn)), \
                    '{} should exist and match "{}"'.format(expect_fn, expect_re)
def _run_elegant(bunch_report=False, with_mpi=False):
    """Write elegant input files and run elegant (or Pelegant under MPI).

    Args:
        bunch_report (bool): skip the run for sdds_beam-based bunches
        with_mpi (bool): allow MPI execution when configured
    """
    # defines lattice_file and elegant_file in our locals()
    exec(pkio.read_text(template_common.PARAMETERS_PYTHON_FILE), locals(), locals())
    # sdds_beam bunches are read from file; nothing to simulate here
    if bunch_report and re.search('\&sdds_beam\s', elegant_file):
        return
    pkio.write_text('elegant.lte', lattice_file)
    ele = 'elegant.ele'
    pkio.write_text(ele, elegant_file)
    # TODO(robnagler) Need to handle this specially, b/c different binary
    # elegant requires RPN_DEFNS to locate its rpn definitions file
    env = copy.deepcopy(os.environ)
    env['RPN_DEFNS'] = pkresource.filename('defns.rpn')
    if with_mpi and mpi.cfg.cores > 1:
        return mpi.run_program(['Pelegant', ele], output=ELEGANT_LOG_FILE, env=env)
    pksubprocess.check_call_with_signals(
        ['elegant', ele],
        output=ELEGANT_LOG_FILE,
        env=env,
        msg=pkdp,
    )
def _run_elegant(bunch_report=False, with_mpi=False):
    """Write elegant input files and run elegant, ignoring process failure.

    Args:
        bunch_report (bool): unused in this variant
        with_mpi (bool): allow MPI execution when configured
    """
    # defines lattice_file, elegant_file and execution_mode in our locals()
    exec(pkio.read_text(template_common.PARAMETERS_PYTHON_FILE), locals(), locals())
    pkio.write_text('elegant.lte', lattice_file)
    ele = 'elegant.ele'
    pkio.write_text(ele, elegant_file)
    kwargs = {
        'output': ELEGANT_LOG_FILE,
        'env': elegant_common.subprocess_env(),
    }
    try:
        #TODO(robnagler) Need to handle this specially, b/c different binary
        if execution_mode == 'parallel' and with_mpi and mpi.cfg.cores > 1:
            mpi.run_program(['Pelegant', ele], **kwargs)
        else:
            pksubprocess.check_call_with_signals(['elegant', ele], msg=pkdlog, **kwargs)
    except Exception as e:
        # ignore elegant failures - errors will be parsed from the log
        pass
def twiss_to_madx(elegant_twiss_file, madx_twiss_file):
    """Convert an elegant twiss SDDS file into MAD-X twiss table format.

    Args:
        elegant_twiss_file (str): input SDDS twiss file
        madx_twiss_file (str): output MAD-X twiss file to write
    """
    outfile = 'sdds_output.txt'
    twiss_file = 'twiss-with-mu.sdds'
    # convert elegant psix to mad-x MU, rad --> rad / 2pi
    pksubprocess.check_call_with_signals(
        [
            'sddsprocess',
            elegant_twiss_file,
            '-define=column,mux,psix 2 pi * /',
            '-define=column,muy,psiy 2 pi * /',
            twiss_file,
        ],
        output=outfile,
        env=elegant_common.subprocess_env(),
    )
    # stream the selected columns as plain text
    column_arg = '-columns={}'.format(
        ','.join(c[0] for c in _ELEGANT_TO_MADX_COLUMNS))
    pksubprocess.check_call_with_signals(
        ['sdds2stream', twiss_file, column_arg],
        output=outfile,
        env=elegant_common.subprocess_env(),
    )
    lines = pkio.read_text(outfile).split('\n')
    # MAD-X twiss tables begin with '* <names>' and '$ ' header lines
    header = '* {}\n$ \n'.format(
        ' '.join(c[1] for c in _ELEGANT_TO_MADX_COLUMNS))
    pkio.write_text(madx_twiss_file, header + '\n'.join(lines) + '\n')