def test_unchecked_remove():
    """Also tests mkdir_parent"""
    from pykern import pkunit
    from pykern import pkio

    with pkunit.save_chdir_work():
        name = 'f1'
        # Removing a nonexistent file must be a silent no-op
        pkio.unchecked_remove(name)
        pkio.write_text(name, 'hello')
        pkio.unchecked_remove(name)
        assert not os.path.exists(name), \
            'When file removed, should be gone'
        for d in ('d1', 'd2/d3'):
            assert py.path.local(d) == pkio.mkdir_parent(d), \
                'When mkdir_parent is called, returns path passed in'
        assert os.path.exists('d1'), \
            'When single directory, should exist'
        assert os.path.exists('d2/d3'), \
            'When nested directory, should exist'
        # Must refuse to remove the current directory or the root
        for dangerous in ('.', os.getcwd(), '/'):
            with pytest.raises(AssertionError):
                pkio.unchecked_remove(dangerous)
def _proprietary_codes():
    """Download proprietary code tarballs into the proprietary code dir.

    For each proprietary sim type, fetches
    ``{cfg.proprietary_code_uri}/{sim_type}-dev.tar.gz`` into that sim
    type's proprietary code dir. When the remote file does not exist,
    a mock placeholder file is written instead.
    """
    import sirepo.feature_config
    import sirepo.sim_data
    import sirepo.srdb
    import urllib.error
    import urllib.request

    for s in sirepo.feature_config.cfg().proprietary_sim_types:
        f = sirepo.sim_data.get_class(s).proprietary_code_tarball()
        if not f:
            # FIX: was "return", which silently skipped all remaining
            # sim types once one of them had no tarball
            continue
        r = pkio.mkdir_parent(
            sirepo.srdb.proprietary_code_dir(s),
        ).join(f)
        # POSIT: download/installers/flash-tarball/radiasoft-download.sh
        u = f'{cfg.proprietary_code_uri}/{s}-dev.tar.gz'
        try:
            urllib.request.urlretrieve(u, r)
        except urllib.error.URLError as e:
            # Only a missing remote file is tolerated; everything else
            # (DNS failure, refused connection, ...) propagates
            if not isinstance(e.reason, FileNotFoundError):
                raise
            pkdlog('uri={} not found; mocking empty file={}', u, r)
            pkio.write_text(
                r,
                'mocked by sirepo.pkcli.setup_dev',
            )
def render_file(filename, j2_ctx, output=None, strict_undefined=False, jinja_env=None):
    """Render filename as template with j2_ctx.

    Args:
        basename (str): name without jinja extension
        j2_ctx (dict): how to replace values in Jinja2 template
        output (str): file name of output; if None, return str
        strict_undefined (bool): set `jinja2.StrictUndefined` if True
        jinja_env (dict): add values to jinja2 environment
    Returns:
        str: rendered template
    """
    env_args = dict(
        trim_blocks=True,
        lstrip_blocks=True,
        keep_trailing_newline=True,
        extensions=['jinja2.ext.do'],
    )
    if strict_undefined:
        env_args['undefined'] = jinja2.StrictUndefined
    if jinja_env:
        env_args.update(jinja_env)
    template_text = pkio.read_text(filename)
    rendered = jinja2.Environment(**env_args).from_string(template_text).render(j2_ctx)
    if output:
        pkio.write_text(output, rendered)
    return rendered
def test_importer(import_req):
    """Round-trips each *.dat fixture through the zgoubi importer.

    For every data file, imports it, fixes up the data, regenerates
    the python source, and compares it against the expected ``.txt``
    fixture in the data dir. Import errors are captured and compared
    against the fixture instead.
    """
    from pykern import pkcollections
    from pykern import pkjson
    from pykern.pkunit import pkeq
    from sirepo.template import zgoubi
    import sirepo.sim_data

    with pkunit.save_chdir_work() as w:
        for fn in pkio.sorted_glob(pkunit.data_dir().join('*.dat')):
            error = None
            try:
                data = zgoubi.import_file(import_req(fn), unit_test_mode=True)
                sirepo.sim_data.get_class('zgoubi').fixup_old_data(data)
                #TODO(pjm): easier way to convert nested dict to pkcollections.Dict?
                data = pkcollections.json_load_any(pkjson.dump_pretty(data))
            except Exception as e:
                pkdlog(pkdexc())
                # NOTE(review): e.message does not exist on Python 3
                # exceptions -- presumably str(e) is intended; confirm
                # interpreter version before relying on this path
                error = e.message
            if error:
                actual = error
            else:
                actual = zgoubi.python_source_for_model(data)
            outfile = fn.basename + '.txt'
            pkio.write_text(outfile, actual)
            e = pkunit.data_dir().join(outfile)
            expect = pkio.read_text(e)
            pkeq(expect, actual, 'diff {} {}', e, w.join(outfile))
def _proprietary_codes():
    """Download proprietary RPMs into the proprietary code dir.

    For each proprietary sim type, fetches
    ``{cfg.proprietary_code_uri}/rscode-{sim_type}-dev.rpm`` into that
    sim type's proprietary code dir. When the remote file does not
    exist, a mock placeholder file is written instead.
    """
    import sirepo.feature_config
    import sirepo.sim_data
    import sirepo.srdb
    import urllib.error
    import urllib.request

    for s in sirepo.feature_config.cfg().proprietary_sim_types:
        r = pkio.mkdir_parent(
            sirepo.srdb.proprietary_code_dir(s),
        ).join(
            sirepo.sim_data.get_class(s).proprietary_code_rpm(),
        )
        # POSIT: download/installers/rpm-code/dev-build.sh
        u = f'{cfg.proprietary_code_uri}/rscode-{s}-dev.rpm'
        try:
            urllib.request.urlretrieve(u, r)
        except urllib.error.URLError as e:
            # only a missing remote file is tolerated; other network
            # errors propagate
            if not isinstance(e.reason, FileNotFoundError):
                raise
            pkdlog('uri={} not found; mocking empty rpm={}', u, r)
            pkio.write_text(
                r,
                'mocked by sirepo.pkcli.setup_dev',
            )
def write_files(self, data, source_path, dest_dir):
    """writes files for the simulation

    Args:
        data (PKDict): simulation data
        source_path (py.path): original source file path
        dest_dir (py.path): where to write commands/lattice files
    Returns:
        PKDict: structure of files written (debugging only)
    """
    class _G(_Generate):
        # FIX: was "def _abspath(basename)" -- missing self, which
        # would raise TypeError when invoked as a method (compare the
        # sibling write_files implementation)
        def _abspath(self, basename):
            return source_path.new(basename=basename)

        def _input_file(self, model_name, field, filename):
            return filename

        def _lattice_filename(self, value):
            return value

    g = _G(data)
    g.sim()
    v = g.jinja_env
    r = PKDict(
        commands=dest_dir.join(source_path.basename),
        lattice=self._lattice_path(dest_dir, data),
    )
    pkio.write_text(r.commands, v.commands)
    pkio.write_text(r.lattice, v.rpn_variables + v.lattice)
    # symlink every referenced input file into the destination dir
    for f in set(
        LatticeUtil(data, _SCHEMA).iterate_models(
            lattice.InputFileIterator(_SIM_DATA)).result,
    ):
        f = _SIM_DATA.lib_file_name_without_type(f)
        dest_dir.join(f).mksymlinkto(source_path.new(basename=f), absolute=False)
    f = g.filename_map
    r.output_files = [f[k] for k in f.keys_in_order]
    return r
def render_file(filename, j2_ctx, output=None, strict_undefined=False):
    """Render filename as template with j2_ctx.

    Args:
        basename (str): name without jinja extension
        j2_ctx (dict): how to replace values in Jinja2 template
        output (str): file name of output; if None, return str
        strict_undefined (bool): set `jinja2.StrictUndefined` if True
    Returns:
        str: rendered template
    """
    env_args = dict(
        trim_blocks=True,
        lstrip_blocks=True,
        keep_trailing_newline=True,
    )
    if strict_undefined:
        env_args['undefined'] = jinja2.StrictUndefined
    rendered = jinja2.Environment(**env_args).from_string(
        pkio.read_text(filename)).render(j2_ctx)
    if output:
        pkio.write_text(output, rendered)
    return rendered
def write_parameters(data, run_dir, is_parallel):
    """Extracts the simulation zip and writes the flash.par input file."""
    _extract_zip(data, run_dir)
    #TODO: generate python instead
    par_text = _generate_parameters_file(data)
    pkio.write_text(run_dir.join('flash.par'), par_text)
def write_parameters(data, run_dir, is_parallel):
    """Removes stale centralized geometry/dump files, then writes parameters."""
    sid = data.simulationId
    pkio.unchecked_remove(_geom_file(sid), _dmp_file(sid))
    pkio.write_text(
        run_dir.join(template_common.PARAMETERS_PYTHON_FILE),
        _generate_parameters_file(data),
    )
def test_import():
    """Parses each ``*-Config`` and ``*flash.par`` fixture and compares
    the pretty-printed JSON output against the expected ``.out`` file.
    Parse failures are compared as their exception text.
    """
    from pykern import pkjson
    from pykern.pkunit import pkeq
    from sirepo.template import flash_parser
    import re

    def _parse_config(fn):
        return flash_parser.ConfigParser().parse(pkio.read_text(fn))

    def _parse_par(fn):
        # the matching *-sirepo-data.json fixture supplies the setup
        # the ParameterParser validates against
        data_file = fn.basename.replace('-flash.par', '')
        return flash_parser.ParameterParser().parse(
            pkjson.load_any(
                pkio.read_text(
                    pkunit.data_dir().join(f'{data_file}-sirepo-data.json'))),
            pkio.read_text(fn),
        )

    with pkunit.save_chdir_work():
        for fn in pkio.sorted_glob(pkunit.data_dir().join('*')):
            if re.search(r'-Config$', fn.basename):
                parser = _parse_config
            elif re.search(r'flash.par$', fn.basename):
                parser = _parse_par
            else:
                continue
            try:
                actual = pkjson.dump_pretty(parser(fn))
            except Exception as e:
                pkdlog(pkdexc())
                # failures are compared against expected error text
                actual = str(e)
            outfile = f'{fn.basename}.out'
            pkio.write_text(outfile, actual)
            expect = pkio.read_text(pkunit.data_dir().join(outfile))
            pkeq(expect, actual)
def test_from_elegant_to_madx_and_back():
    """Converts example elegant lattices to MAD-X and back, comparing
    generated sources against fixtures at each step."""
    from pykern import pkio
    from pykern.pkunit import pkeq
    from sirepo.template import elegant, madx, madx_converter, madx_parser

    with pkunit.save_chdir_work() as d:

        def _check(outfile, actual):
            # write actual output and diff against the expected fixture
            pkio.write_text(outfile, actual)
            e = pkunit.data_dir().join(outfile)
            expect = pkio.read_text(e)
            pkeq(expect, actual, 'diff {} {}', e, d.join(outfile))

        for name in ('SPEAR3', 'Compact Storage Ring', 'Los Alamos Proton Storage Ring'):
            base = name.lower().replace(' ', '-')
            data = _example_data(name)
            mad = madx_parser.parse_file(
                elegant.python_source_for_model(data, 'madx'))
            madx._fixup_madx(mad)
            actual = madx.python_source_for_model(mad, None)
            _check(base + '.madx', actual)
            # round-trip: parse the generated MAD-X back into elegant
            data = madx_parser.parse_file(actual)
            lattice = madx_converter.from_madx(elegant.SIM_TYPE, data)
            actual = elegant.python_source_for_model(lattice, None)
            _check(base + '.lte', actual)
def _run_elegant():
    """Runs elegant for a bunch report and writes a 2d histogram to out.json.

    Reads the report configuration from in.json, generates the lattice
    and ele files from elegant_parameters.py, runs elegant, then bins
    the resulting bunch columns with numpy.
    """
    run_dir = os.getcwd()
    with open('in.json') as f:
        data = json.load(f)
    # NOTE(review): exec into locals() is fragile on Python 3 -- the
    # names lattice_file/elegant_file below presumably come from the
    # executed parameters script; confirm the interpreter version
    exec(pkio.read_text('elegant_parameters.py'), locals(), locals())
    pkio.write_text('elegant.lte', lattice_file)
    pkio.write_text('elegant.ele', elegant_file)
    call(['elegant', 'elegant.ele'])
    index = 0
    # sdds API signals failure via return codes, not exceptions
    if sdds.sddsdata.InitializeInput(index, 'elegant.bun') != 1:
        sdds.sddsdata.PrintErrors(1)
    column_names = sdds.sddsdata.GetColumnNames(index)
    errorCode = sdds.sddsdata.ReadPage(index)
    if errorCode != 1:
        sdds.sddsdata.PrintErrors(1)
    bunch = data['models'][data['report']]
    x = sdds.sddsdata.GetColumn(index, column_names.index(bunch['x']))
    y = sdds.sddsdata.GetColumn(index, column_names.index(bunch['y']))
    nbins = int(bunch['histogramBins'])
    hist, edges = np.histogramdd([x, y], nbins)
    info = {
        'x_range': [float(edges[0][0]), float(edges[0][-1]), len(hist)],
        'y_range': [float(edges[1][0]), float(edges[1][-1]), len(hist[0])],
        'x_label': _FIELD_LABEL[bunch['x']],
        'y_label': _FIELD_LABEL[bunch['y']],
        'title': _plot_title(bunch),
        # transpose so rows are y and columns are x for the heatmap
        'z_matrix': hist.T.tolist(),
    }
    with open('out.json', 'w') as outfile:
        json.dump(info, outfile)
def sbatch_script(path):
    """Write the sbatch script to ``path``.

    Args:
        path (str): where to write file
    """
    content = _script()
    pkio.write_text(path, content)
def generate_input_file(self, kwarg_dict, directory):
    """Renders each mapped file template with kwarg_dict and writes it.

    Args:
        kwarg_dict (dict): format() arguments for this job
        directory (str): output directory for the rendered files
    """
    # file_mapping maps a template attribute name to an output filename
    for attr_name, out_name in self.setup['file_mapping'].items():
        rendered = getattr(
            self.get_file_def_module(), attr_name).format(**kwarg_dict)
        pkio.write_text(os.path.join(directory, out_name), rendered)
def write_files(self, data, source_path, dest_dir):
    """writes files for the simulation

    Args:
        data (PKDict): simulation data
        source_path (py.path): original source file path
        dest_dir (py.path): where to write commands/lattice files
    Returns:
        PKDict: structure of files written (debugging only)
    """
    def _unescape(value):
        # collapse doubled backslashes emitted by the generator
        return re.sub(r'\\\\', r'\\', value)

    class _G(_Generate):

        def _abspath(self, basename):
            return source_path.new(basename=basename)

        def _input_file(self, model_name, field, filename):
            return filename

        def _lattice_filename(self, value):
            return value

    g = _G(data, update_output_filenames=False)
    g.sim()
    v = g.jinja_env
    r = PKDict(
        commands=dest_dir.join(source_path.basename),
        lattice=self._lattice_path(dest_dir, data),
    )
    pkio.write_text(r.commands, _unescape(v.commands))
    # only write the lattice when it does not already exist
    if not r.lattice.exists():
        pkio.write_text(r.lattice, v.rpn_variables + v.lattice)
    self._write_input_files(data, source_path, dest_dir)
    f = g.filename_map
    r.output_files = [f[k] for k in f.keys_in_order]
    return r
def _run_jspec(data):
    """Converts the ring to MAD-X, runs jspec, and returns the log text.

    Args:
        data (dict): simulation data containing models.ring
    Returns:
        str: contents of the jspec log file
    """
    _elegant_to_madx(data['models']['ring'])
    # NOTE(review): relies on exec defining jspec_file in locals(),
    # which is fragile on Python 3 -- confirm interpreter version
    exec(pkio.read_text(template_common.PARAMETERS_PYTHON_FILE), locals(), locals())
    jspec_filename = template.JSPEC_INPUT_FILENAME
    pkio.write_text(jspec_filename, jspec_file)
    pksubprocess.check_call_with_signals(['jspec', jspec_filename], msg=pkdlog, output=template.JSPEC_LOG_FILE)
    return pkio.read_text(template.JSPEC_LOG_FILE)
def _generate_ptc_particles_file(run_dir, data, twiss):
    """Generates the PTC particles file and bunch-particle JSON in run_dir."""
    bunch = data.models.bunch
    particles = _ptc_particles(
        PKDict(
            x=_bunch_twiss(bunch, 'x'),
            y=_bunch_twiss(bunch, 'y'),
        ),
        bunch.numberOfParticles,
        bunch.randomSeed,
    )
    v = PKDict(
        x=template.to_floats(particles.x.pos),
        px=template.to_floats(particles.x.p),
        y=template.to_floats(particles.y.pos),
        py=template.to_floats(particles.y.p),
        t=template.to_floats(particles.t.pos),
        pt=template.to_floats(particles.t.p),
    )
    if 'bunchReport' in data.report:
        v.summaryData = twiss
    simulation_db.write_json(run_dir.join(template.BUNCH_PARTICLES_FILE), v)
    # one ptc_start statement per particle
    lines = []
    for i in range(len(v.x)):
        fields = ', '.join(f'{f}={v[f][i]}' for f in ('x', 'px', 'y', 'py', 't', 'pt'))
        lines.append(f'ptc_start, {fields};\n')
    pkio.write_text(run_dir.join(template.PTC_PARTICLES_FILE), ''.join(lines))
def twiss_to_madx(elegant_twiss_file, madx_twiss_file):
    """Converts an elegant twiss SDDS file to a MAD-X twiss table file."""
    outfile = 'sdds_output.txt'
    twiss_file = 'twiss-with-mu.sdds'
    # convert elegant psix to mad-x MU, rad --> rad / 2pi
    pksubprocess.check_call_with_signals([
        'sddsprocess',
        elegant_twiss_file,
        '-define=column,mux,psix 2 pi * /',
        '-define=column,muy,psiy 2 pi * /',
        twiss_file,
    ], output=outfile, env=elegant_common.subprocess_env())
    pksubprocess.check_call_with_signals([
        'sdds2stream',
        twiss_file,
        '-columns={}'.format(','.join(col[0] for col in _ELEGANT_TO_MADX_COLUMNS)),
    ], output=outfile, env=elegant_common.subprocess_env())
    lines = pkio.read_text(outfile).split('\n')
    header = '* {}\n$ \n'.format(
        ' '.join(col[1] for col in _ELEGANT_TO_MADX_COLUMNS))
    pkio.write_text(madx_twiss_file, header + '\n'.join(lines) + '\n')
def write_parameters(data, run_dir, is_parallel):
    """Writes the FLASH par file unless this is the setup animation report."""
    if data.report == 'setupAnimation':
        return
    par_text = _generate_parameters_file(data, run_dir=run_dir)
    pkio.write_text(run_dir.join(_FLASH_PAR_FILE), par_text)
def _run_hellweg(cfg_dir):
    """Executes the hellweg beam solver from generated parameter files."""
    params = template_common.exec_parameters()
    pkio.write_text(template.HELLWEG_INPUT_FILE, params.input_file)
    pkio.write_text(template.HELLWEG_INI_FILE, params.ini_file)
    beam_solver = solver.BeamSolver(
        template.HELLWEG_INI_FILE,
        template.HELLWEG_INPUT_FILE,
    )
    beam_solver.solve()
    beam_solver.save_output(template.HELLWEG_SUMMARY_FILE)
    beam_solver.dump_bin(template.HELLWEG_DUMP_FILE)
def _run_tunes_report(cfg_dir, data):
    """Runs the zgoubi tunes program and writes the sequential result."""
    params = template_common.exec_parameters()
    pkio.write_text(template.TUNES_INPUT_FILE, params.tunes_file)
    #TODO(pjm): uses datafile from animation directory
    os.symlink('../animation/zgoubi.fai', 'zgoubi.fai')
    subprocess.call([_TUNES_PATH])
    report = template.extract_tunes_report(cfg_dir, data)
    template_common.write_sequential_result(report)
def write_status(status, run_dir):
    """Write status to simulation

    Args:
        status (str): pending, running, completed, canceled
        run_dir (py.path): where to write the file
    """
    status_path = run_dir.join(_STATUS_FILE)
    pkio.write_text(status_path, status)
def write_parameters(data, run_dir, is_parallel):
    """Writes the parameters file; raises UserAlert when the breed requests it."""
    match = re.search('^user_alert=(.*)', data.models.dog.breed)
    if match:
        raise sirepo.util.UserAlert(match.group(1), 'log msg should not be sent')
    pkio.write_text(
        run_dir.join(template_common.PARAMETERS_PYTHON_FILE),
        _generate_parameters_file(data),
    )
def write_parameters(data, run_dir, is_parallel, python_file=template_common.PARAMETERS_PYTHON_FILE):
    """Generates the python parameters file and writes it into run_dir."""
    content = _generate_parameters_file(data)
    pkio.write_text(run_dir.join(python_file), content)
def _run_jspec(data):
    """Converts the ring to MAD-X and runs jspec on the generated input."""
    _elegant_to_madx(data['models']['ring'])
    params = template_common.exec_parameters()
    input_file = template.JSPEC_INPUT_FILENAME
    pkio.write_text(input_file, params.jspec_file)
    pksubprocess.check_call_with_signals(
        ['jspec', input_file],
        msg=pkdlog,
        output=template.JSPEC_LOG_FILE,
    )
def _add_host(j2_ctx, srv, host):
    """Initializes a host and writes its netrc file to the srv dir."""
    from rsconf.component import docker_registry
    from rsconf.component import rsconf

    netrc_text = rsconf.host_init(j2_ctx, host)
    pkio.write_text(srv.join(host + '-netrc'), netrc_text)
    # the master host additionally gets its docker registry initialized
    if host == j2_ctx.master:
        docker_registry.host_init(j2_ctx, host)
def _run_hellweg(cfg_dir):
    """Runs the hellweg BeamSolver from the generated parameters script."""
    with pkio.save_chdir(cfg_dir):
        # NOTE(review): relies on exec defining input_file/ini_file in
        # locals(), which is fragile on Python 3 -- confirm interpreter
        exec(pkio.read_text(template_common.PARAMETERS_PYTHON_FILE), locals(), locals())
        pkio.write_text(template.HELLWEG_INPUT_FILE, input_file)
        pkio.write_text(template.HELLWEG_INI_FILE, ini_file)
        solver = BeamSolver(template.HELLWEG_INI_FILE, template.HELLWEG_INPUT_FILE)
        solver.solve()
        solver.save_output(template.HELLWEG_SUMMARY_FILE)
        solver.dump_bin(template.HELLWEG_DUMP_FILE)
def _run_jspec(run_dir):
    """Runs jspec in run_dir and returns the jspec log file contents.

    Args:
        run_dir (py.path): directory with the input json and parameters
    Returns:
        str: contents of the jspec log file
    """
    with pkio.save_chdir(run_dir):
        data = simulation_db.read_json(template_common.INPUT_BASE_NAME)
        _elegant_to_madx(data['models']['ring'])
        # NOTE(review): relies on exec defining jspec_file in locals(),
        # which is fragile on Python 3 -- confirm interpreter
        exec(pkio.read_text(template_common.PARAMETERS_PYTHON_FILE), locals(), locals())
        jspec_filename = template.JSPEC_INPUT_FILENAME
        pkio.write_text(jspec_filename, jspec_file)
        pksubprocess.check_call_with_signals(['jspec', jspec_filename], msg=pkdp, output=template.JSPEC_LOG_FILE)
        return pkio.read_text(template.JSPEC_LOG_FILE)
def _run_tunes_report(cfg_dir, data):
    """Runs the zgoubi tunes program in cfg_dir and writes its result."""
    with pkio.save_chdir(cfg_dir):
        # NOTE(review): relies on exec defining tunes_file in locals(),
        # which is fragile on Python 3 -- confirm interpreter
        exec(pkio.read_text(template_common.PARAMETERS_PYTHON_FILE), locals(), locals())
        pkio.write_text(template.TUNES_INPUT_FILE, tunes_file)
        #TODO(pjm): uses datafile from animation directory
        os.symlink('../animation/zgoubi.fai', 'zgoubi.fai')
        subprocess.call([_TUNES_PATH])
        simulation_db.write_result(template.extract_tunes_report(
            cfg_dir, data))
def write_parameters(data, schema, run_dir, is_parallel):
    """Write the parameters file

    Args:
        data (dict): input
        schema (dict): to validate data
        run_dir (py.path): where to write
        is_parallel (bool): run in background?
    """
    content = generate_parameters_file(data, is_parallel)
    pkio.write_text(
        run_dir.join(template_common.PARAMETERS_PYTHON_FILE),
        content,
    )
def _run_hellweg(cfg_dir):
    """Runs the hellweg solver from the generated parameters script."""
    with pkio.save_chdir(cfg_dir):
        # NOTE(review): relies on exec defining input_file/ini_file in
        # locals(), which is fragile on Python 3 -- confirm interpreter
        exec(pkio.read_text(template_common.PARAMETERS_PYTHON_FILE), locals(), locals())
        pkio.write_text(template.HELLWEG_INPUT_FILE, input_file)
        pkio.write_text(template.HELLWEG_INI_FILE, ini_file)
        s = solver.BeamSolver(template.HELLWEG_INI_FILE, template.HELLWEG_INPUT_FILE)
        s.solve()
        s.save_output(template.HELLWEG_SUMMARY_FILE)
        s.dump_bin(template.HELLWEG_DUMP_FILE)
def _save_field_csv(field_type, vectors, scipy_rotation, path):
    """Writes rotated field vectors as CSV rows to path and returns path.

    Each row is x,y,z followed by the scaled direction components.
    """
    # reserve first line for a header
    rows = [f'x,y,z,{field_type}x,{field_type}y,{field_type}z']
    pts, mags, dirs = _rotate_fields(vectors, scipy_rotation, True)
    # pts/dirs are flat arrays of xyz triples, one per magnitude
    for i, mag in enumerate(mags):
        j = 3 * i
        row = numpy.append(pts[j:j + 3], mag * dirs[j:j + 3])
        rows.append(','.join(map(str, row)))
    pkio.write_text(path, '\n'.join(rows))
    return path
def write_parameters(data, run_dir, is_parallel):
    """Writes the OPAL input file and, for parallel runs, a position-export script."""
    pkio.write_text(
        run_dir.join(OPAL_INPUT_FILE),
        _generate_parameters_file(data),
    )
    if is_parallel:
        script = (
            'import os\n'
            'os.system("python data/opal_ElementPositions.py --export-vtk")\n'
        )
        pkio.write_text(run_dir.join(OPAL_POSITION_FILE), script)
def test_generate_python():
    """Compares generated shadow parameter files against fixtures."""
    from pykern import pkio
    from pykern.pkunit import pkeq
    from sirepo.template import shadow

    with pkunit.save_chdir_work():
        for name in ('Complete Beamline', 'Wiggler'):
            data = _example_data(name)
            # report on the final beamline element's watchpoint
            data['report'] = 'watchpointReport{}'.format(data.models.beamline[-1].id)
            actual = shadow._generate_parameters_file(data)
            outfile = data.models.simulation.simulationId + '.txt'
            pkio.write_text(outfile, actual)
            pkeq(pkio.read_text(pkunit.data_dir().join(outfile)), actual)
def run_background(cfg_dir):
    """Runs an SRW multi-particle simulation in the background.

    Builds run_background.py from srw_parameters.py plus an MPI or
    serial driver epilog, launches it as a child process, and forwards
    SIGTERM so the child is terminated along with this process.
    """
    with pkio.save_chdir(cfg_dir):
        fn = 'run_background.py'
        cmd = [sys.executable or 'python', fn]
        script = pkio.read_text('srw_parameters.py')
        p = dict(pkcollections.map_items(cfg))
        if cfg.slave_processes > 1:
            cmd[0:0] = [
                'mpiexec',
                '-n',
                # SRW includes a master process so 2 really needs 3 processes
                str(cfg.slave_processes + 1),
            ]
            script += '''
from mpi4py import MPI
if MPI.COMM_WORLD.Get_rank():
    import signal
    signal.signal(signal.SIGTERM, lambda x, y: MPI.COMM_WORLD.Abort(1))
'''
        else:
            # In interactive (dev) mode, output as frequently as possible
            p['particles_per_slave'] = 1
        script += '''
import srwl_bl
v = srwl_bl.srwl_uti_parse_options(get_srw_params(), use_sys_argv=False)
source_type, mag = setup_source(v)
v.wm = True
v.wm_nm = {total_particles}
v.wm_na = {particles_per_slave}
# Number of "iterations" per save is best set to num processes
v.wm_ns = {slave_processes}
op = get_beamline_optics()
srwl_bl.SRWLBeamline(_name=v.name).calc_all(v, op)
'''.format(**p)
        pkio.write_text(fn, script)
        try:
            # NOTE(review): p is rebound from the config dict to the
            # child process here; a nonzero exit clears it so finally
            # does not terminate an already-dead child
            p = subprocess.Popen(
                cmd,
                stdin=open(os.devnull),
                stdout=open('run_background.out', 'w'),
                stderr=subprocess.STDOUT,
            )
            signal.signal(signal.SIGTERM, lambda x, y: p.terminate())
            rc = p.wait()
            if rc != 0:
                p = None
                raise RuntimeError('child terminated: retcode={}'.format(rc))
        finally:
            if not p is None:
                p.terminate()
def assert_object_with_json(basename, actual):
    """Converts actual to JSON and compares with data_dir/basename.json

    Reads data_dir/basename.json and compares with actual converted to
    json. Trailing newline is managed properly. The keys are sorted and
    indentation is 4. actual written to work_dir.

    Args:
        expected_basename (str): file to be found in data_dir with json suffix
        actual (object): to be serialized as json
    """
    serialized = pkdpretty(actual)
    fn = '{}.json'.format(basename)
    pkio.write_text(work_dir().join(fn), serialized)
    expect = pkio.read_text(data_dir().join(fn))
    assert expect == serialized, \
        '{}: unexpected result'.format(basename)
def test_init_tree():
    """Normal case

    Initializes a git repo, runs projex.init_tree, and verifies the
    generated project skeleton files against expected regexes, then
    runs the project's own test and tox targets.
    """
    with pkunit.save_chdir_work():
        name = 'proj1'
        pkio.mkdir_parent(name)
        with pkio.save_chdir(name):
            subprocess.check_call(['git', 'init', '.'])
            subprocess.check_call(['git', 'config', 'user.email', '*****@*****.**'])
            subprocess.check_call(['git', 'config', 'user.name', 'pykern'])
            projex.init_tree(
                name=name,
                author='zauthor',
                author_email='*****@*****.**',
                description='some python project',
                license='MIT',
                url='http://example.com',
            )
            pkio.write_text('tests/test_1.py', 'def test_1(): pass')
            # each generated file must exist and match its regex
            for expect_fn, expect_re in (
                ('.gitignore', 'MANIFEST.in'),
                ('LICENSE', 'The MIT License'),
                ('README.md', 'licenses/MIT'),
                ('docs/_static/.gitignore', ''),
                ('docs/_templates/.gitignore', ''),
                ('docs/index.rst', name),
                ('requirements.txt', 'pykern'),
                ('setup.py', "author='zauthor'"),
                ('setup.py', r':copyright:.*zauthor\.'),
                ('tests/.gitignore', '_work'),
                (name + '/__init__.py', ''),
                (name + '/package_data/.gitignore', ''),
                (
                    '{}/{}_console.py'.format(name, name),
                    r"main\('{}'\)".format(name),
                ),
            ):
                assert re.search(expect_re, pkio.read_text(expect_fn)), \
                    '{} should exist and match "{}"'.format(expect_fn, expect_re)
            subprocess.check_call(['git', 'commit', '-m', 'initial'])
            # Do not install from PyPI
            pkio.write_text(
                'requirements.txt',
                '-e ' + str(py.path.local(__file__).dirpath().dirpath().dirpath()),
            );
            subprocess.check_call(['python', 'setup.py', 'test'])
            subprocess.check_call(['python', 'setup.py', 'tox'])
def test_importer():
    """Imports each .ele/.lte fixture and compares generated output
    against the expected .txt fixture in the data dir."""
    from pykern import pkcollections
    from pykern import pkio
    from pykern.pkunit import pkeq
    from sirepo.template import elegant

    with pkunit.save_chdir_work():
        for fn in pkio.sorted_glob(pkunit.data_dir().join('*')):
            # skip non-lattice files and companion ele.lte fixtures
            if not pkio.has_file_extension(fn, ('ele', 'lte')) \
                or fn.basename.endswith('ele.lte'):
                continue
            error = None
            try:
                data = elegant.import_file(FlaskRequest(fn))
            except Exception as e:
                pkdlog(pkdexc())
                # NOTE(review): e.message does not exist on Python 3
                # exceptions -- presumably str(e); confirm interpreter
                error = e.message
            if error:
                actual = error
            else:
                if pkio.has_file_extension(fn, 'lte'):
                    data['models']['commands'] = []
                    actual = '{}{}'.format(
                        elegant._generate_variables(data),
                        elegant.generate_lattice(
                            data,
                            elegant._build_filename_map(data),
                            elegant._build_beamline_map(data),
                            pkcollections.Dict(),
                        ),
                    )
                else:
                    # ele files are paired with a companion .lte file
                    data2 = elegant.import_file(FlaskRequest('{}.lte'.format(fn)), test_data=data)
                    actual = elegant._generate_commands(
                        data2,
                        elegant._build_filename_map(data2),
                        elegant._build_beamline_map(data2),
                        pkcollections.Dict(),
                    )
            outfile = fn.basename + '.txt'
            pkio.write_text(outfile, actual)
            expect = pkio.read_text(pkunit.data_dir().join(outfile))
            #TODO(pjm): this takes too long if there are a lot of diffs
            #assert expect == actual
            pkeq(expect, actual)
def _run_elegant(bunch_report=False, with_mpi=False):
    """Writes elegant inputs and runs elegant (or Pelegant under MPI).

    Failures from the elegant binary are deliberately ignored here;
    errors are parsed from the log afterwards.
    """
    # NOTE(review): relies on exec defining lattice_file, elegant_file
    # and execution_mode in locals(); fragile on Python 3 -- confirm
    exec(pkio.read_text(template_common.PARAMETERS_PYTHON_FILE), locals(), locals())
    pkio.write_text('elegant.lte', lattice_file)
    ele = 'elegant.ele'
    pkio.write_text(ele, elegant_file)
    kwargs = {
        'output': ELEGANT_LOG_FILE,
        'env': elegant_common.subprocess_env(),
    }
    try:
        #TODO(robnagler) Need to handle this specially, b/c different binary
        if execution_mode == 'parallel' and with_mpi and mpi.cfg.cores > 1:
            mpi.run_program(['Pelegant', ele], **kwargs)
        else:
            pksubprocess.check_call_with_signals(['elegant', ele], msg=pkdlog, **kwargs)
    except Exception as e:
        # ignore elegant failures - errors will be parsed from the log
        pass
def hack_nfs_write_status(status, run_dir):
    """Verify status file exists before writing.

    NFS doesn't propagate files immediately so there is a race
    condition when the celery worker starts. This file handles this
    case.

    Args:
        status (str): pending, running, completed, canceled
        run_dir (py.path): where to write the file
    """
    fn = run_dir.join(_STATUS_FILE)
    attempts = cfg.nfs_tries
    while attempts > 0 and not fn.check(file=True):
        time.sleep(cfg.nfs_sleep)
        attempts -= 1
    # Try once always
    pkio.write_text(fn, status)
def write_parameters(data, run_dir, is_parallel):
    """Write the parameters file

    Args:
        data (dict): input
        run_dir (py.path): where to write
        is_parallel (bool): run in background?
    """
    pkio.write_text(
        run_dir.join(template_common.PARAMETERS_PYTHON_FILE),
        generate_parameters_file(data, is_parallel),
    )
    # SCRIPT command files must be readable and executable by the owner
    for f in _simulation_files(data):
        if re.search(r'SCRIPT-commandFile', f):
            os.chmod(str(run_dir.join(f)), stat.S_IRUSR | stat.S_IXUSR)
def _run_elegant(bunch_report=False, with_mpi=False):
    """Writes elegant input files and runs elegant (Pelegant under MPI).

    Args:
        bunch_report (bool): skip the run when the beam is an sdds_beam
        with_mpi (bool): run Pelegant via MPI when multiple cores configured
    """
    # NOTE: relies on the executed parameters script defining
    # lattice_file and elegant_file in locals()
    exec(pkio.read_text(template_common.PARAMETERS_PYTHON_FILE), locals(), locals())
    # FIX: raw string removes the invalid "\&"/"\s" escape-sequence
    # warnings; the regex (and its runtime value) is unchanged
    if bunch_report and re.search(r'\&sdds_beam\s', elegant_file):
        return
    pkio.write_text('elegant.lte', lattice_file)
    ele = 'elegant.ele'
    pkio.write_text(ele, elegant_file)
    # TODO(robnagler) Need to handle this specially, b/c different binary
    env = copy.deepcopy(os.environ)
    env['RPN_DEFNS'] = pkresource.filename('defns.rpn')
    if with_mpi and mpi.cfg.cores > 1:
        return mpi.run_program(['Pelegant', ele], output=ELEGANT_LOG_FILE, env=env)
    pksubprocess.check_call_with_signals(
        ['elegant', ele],
        output=ELEGANT_LOG_FILE,
        env=env,
        msg=pkdp,
    )
def twiss_to_madx(elegant_twiss_file, madx_twiss_file):
    """Converts an elegant twiss SDDS file into a MAD-X twiss table file."""
    outfile = 'sdds_output.txt'
    twiss_file = 'twiss-with-mu.sdds'
    # convert elegant psix to mad-x MU, rad --> rad / 2pi
    pksubprocess.check_call_with_signals([
        'sddsprocess',
        elegant_twiss_file,
        '-define=column,mux,psix 2 pi * /',
        '-define=column,muy,psiy 2 pi * /',
        twiss_file,
    ], output=outfile, env=elegant_common.subprocess_env())
    column_list = ','.join(c[0] for c in _ELEGANT_TO_MADX_COLUMNS)
    pksubprocess.check_call_with_signals([
        'sdds2stream',
        twiss_file,
        '-columns={}'.format(column_list),
    ], output=outfile, env=elegant_common.subprocess_env())
    lines = pkio.read_text(outfile).split('\n')
    header = '* {}\n$ \n'.format(' '.join(c[1] for c in _ELEGANT_TO_MADX_COLUMNS))
    pkio.write_text(madx_twiss_file, header + '\n'.join(lines) + '\n')
def test_walk_tree_and_sorted_glob():
    """Looks in work_dir

    Builds a small directory tree, then verifies walk_tree returns
    only files (optionally filtered by regex) and that sorted_glob
    matches directory names.
    """
    from pykern import pkunit
    from pykern import pkio

    with pkunit.save_chdir_work() as pwd:
        for f in ('d1/d7', 'd2/d3', 'd4/d5/d6'):
            pkio.mkdir_parent(f)
        expect = []
        for f in ['d1/d7/f1', 'd4/d5/f2', 'd2/d3/f3']:
            pkio.write_text(f, '')
            expect.append(py.path.local(f))
        assert sorted(expect) == list(pkio.walk_tree('.')), \
            'When walking tree, should only return files'
        assert [expect[2]] == list(pkio.walk_tree('.', 'f3')), \
            'When walking tree with file_re, should only return matching files'
        assert [expect[0]] == list(pkio.walk_tree('.', '^d1')), \
            'When walking tree with file_re, file to match does not include dir being searched'
        # glob pattern matches d2 and d4 but not files inside them
        assert pkio.sorted_glob('*[42]') == [py.path.local(f) for f in ('d2', 'd4')]
def _start_simulation(data, run_async=False):
    """Setup and start the simulation.

    Args:
        data (dict): app data
        run_async (bool): run-background or run
    Returns:
        object: _Command or daemon instance
    """
    run_dir = simulation_db.simulation_run_dir(data, remove_dir=True)
    pkio.mkdir_parent(run_dir)
    #TODO(robnagler) create a lock_dir -- what node/pid/thread to use?
    #   probably can only do with celery.
    simulation_type = data['simulationType']
    sid = simulation_db.parse_sid(data)
    data = simulation_db.fixup_old_data(simulation_type, data)
    assert simulation_type in simulation_db.APP_NAMES, \
        '{}: invalid simulation type'.format(simulation_type)
    template = sirepo.template.import_module(simulation_type)
    simulation_db.save_simulation_json(simulation_type, data)
    # copy the simulation's and the lib's files into the run dir
    for d in simulation_db.simulation_dir(simulation_type, sid), simulation_db.simulation_lib_dir(simulation_type):
        for f in glob.glob(str(d.join('*.*'))):
            if os.path.isfile(f):
                py.path.local(f).copy(run_dir)
    with open(str(run_dir.join('in{}'.format(simulation_db.JSON_SUFFIX))), 'w') as outfile:
        json.dump(data, outfile)
    pkio.write_text(
        run_dir.join(simulation_type + '_parameters.py'),
        template.generate_parameters_file(
            data,
            _schema_cache(simulation_type),
            run_dir=run_dir,
            run_async=run_async,
        )
    )
    cmd = [_ROOT_CMD, simulation_type] \
        + ['run-background' if run_async else 'run'] + [str(run_dir)]
    # async runs go to the job queue; foreground runs are time-limited
    if run_async:
        return cfg.job_queue(sid, run_dir, cmd)
    return _Command(cmd, cfg.foreground_time_limit)
def run_script(script):
    """Execute python script with mpi.

    Prepends (or substitutes for the ``__future__`` import) a prolog
    that makes non-rank-0 processes abort the MPI job on SIGTERM.

    Args:
        script (str): python text
    Returns:
        object: result of run_program
    """
    abort = '''
from mpi4py import MPI
if MPI.COMM_WORLD.Get_rank():
    import signal
    signal.signal(signal.SIGTERM, lambda x, y: MPI.COMM_WORLD.Abort(1))
'''
    n = re.sub(r'^from __future.*', abort, script, count=1, flags=re.MULTILINE)
    # if there was no __future__ import to replace, prepend the prolog
    script = abort + script if n == script else n
    fn = 'mpi_run.py'
    pkio.write_text(fn, script)
    # FIX: removed unused local "p = None"
    return run_program([sys.executable or 'python', fn])
def render_file(filename, values, output=None):
    """Render filename as template with values.

    Args:
        basename (str): name without jinja extension
        values (dict): how to replace values
        output (str): file name of output; if None, return str
    Returns:
        str: rendered template
    """
    env = jinja2.Environment(
        trim_blocks=True,
        lstrip_blocks=True,
        keep_trailing_newline=True,
    )
    rendered = env.from_string(pkio.read_text(filename)).render(values)
    if output:
        pkio.write_text(output, rendered)
    return rendered
def test_init_and_run(monkeypatch):
    """Initializes a sim repo, runs it, and verifies the output.

    Skipped silently when no ``sim_test`` auth config is provided.
    """
    from pykern import pkio
    from pykern import pkunit
    from pykern.pkcli import sim
    from pykern.pkcli import rsmanifest
    import netrc
    import os
    import os.path
    import re
    import subprocess

    cfg = pkunit.cfg.aux.get('sim_test', None)
    if not cfg:
        # No testing if there's no auth config
        return
    u, p = cfg.split(' ')
    monkeypatch.setattr(netrc, 'netrc', _netrc)
    _netrc.result = (u, None, p)
    with pkunit.save_chdir_work(is_pkunit_prefix=True):
        f = 'out/log'
        expect_code = pkunit.random_alpha()
        pkio.write_text('run.sh', 'echo {}>{}'.format(expect_code, f))
        rsmanifest.pkunit_setup()
        sim._cmd_init()
        sim._cmd_run()
        # FIX: removed stray trailing comma that wrapped the output in
        # a one-element tuple (and the x[0] workaround it forced)
        x = subprocess.check_output(['git', 'remote', '-v'])
        m = re.search(r'/(sim-sim_work-\d+-\d+)\.git', x)
        # FIX: assert the match BEFORE dereferencing it, so a missing
        # remote fails with the pkok message instead of AttributeError
        pkunit.pkok(m, 'git remote: failed: {}', x)
        repo = m.group(1)
        pkunit.pkeq(expect_code, pkio.read_text('out/log').rstrip())
        os.remove('run.sh')
        sim._cmd_pip('djson')
        # FIX: dropped pointless .format() on a literal with no placeholders
        pkio.write_text('run.py', 'import djson')
        sim._cmd_run()
        sim._git_api_request(
            'delete',
            'repositories/{user}/{repo}',
            dict(repo=repo),
        )
def test_importer():
    """Imports each fixture file and compares generated output to the
    expected .txt fixture; import errors are compared as text."""
    from sirepo.template import elegant

    with pkunit.save_chdir_work():
        for filename in _FILES:
            error, data = elegant.import_file(TestFlaskRequest(filename))
            outfile = '{}.txt'.format(filename)
            if error:
                actual = error
            else:
                if '.lte' in filename:
                    # lattices: compare generated variables + lattice
                    data['models']['commands'] = []
                    actual = '{}{}'.format(
                        elegant._generate_variables(data),
                        elegant.generate_lattice(data, elegant._build_filename_map(data), elegant._build_beamline_map(data), {}))
                else:
                    # ele files: re-import the companion .lte first
                    err2, data2 = elegant.import_file(TestFlaskRequest('{}.lte'.format(filename)), test_data=data)
                    actual = elegant._generate_commands(data2, elegant._build_filename_map(data2), elegant._build_beamline_map(data2), {})
            pkio.write_text(outfile, actual)
            expect = pkio.read_text(pkunit.data_dir().join(outfile))
            #TODO(pjm): this takes too long if there are a lot of diffs
            #assert expect == actual
            if expect != actual:
                assert False
def test_write_text():
    """Also tests read_text"""
    from pykern import pkunit
    from pykern import pkio

    work = pkunit.empty_work_dir()
    target = work.join('anything')
    content = 'something'
    result = pkio.write_text(str(target), content)
    assert target == result, \
        'Verify result is file path as py.path.Local'
    with open(str(target)) as fh:
        assert content == fh.read(), \
            'When write_text is called, it should write "something"'
    assert content == pkio.read_text(str(target)), \
        'When read_text, it should read "something"'
def import_python(code, tmp_dir, lib_dir, user_filename=None, arguments=None):
    """Converts script_text into json and stores as new simulation.

    Avoids too much data back to the user in the event of an error.
    This could be a potential security issue, because the script
    could be used to probe the system.

    Args:
        simulation_type (str): always "srw", but used to find lib dir
        code (str): Python code that runs SRW
        user_filename (str): uploaded file name for log
        arguments (str): argv to be passed to script
    Returns:
        dict: simulation data
    """
    script = None
    # Patch for the mirror profile for the exported .py file from Sirepo:
    code = _patch_mirror_profile(code, lib_dir)
    try:
        with pkio.save_chdir(tmp_dir):
            # This string won't show up anywhere
            script = pkio.write_text('in.py', code)
            o = SRWParser(
                script,
                lib_dir=py.path.local(lib_dir),
                user_filename=user_filename,
                arguments=arguments,
            )
            return o.data
    except Exception as e:
        lineno = script and _find_line_in_trace(script)
        # Avoid
        # NOTE(review): e.message does not exist on Python 3
        # exceptions -- presumably str(e); confirm interpreter version
        pkdlog(
            'Error: {}; exception={}; script={}; filename={}; stack:\n{}',
            e.message,
            e,
            script,
            user_filename,
            pkdexc(),
        )
        # truncate so we do not leak too much detail to the user
        e = str(e)[:50]
        raise ValueError(
            'Error on line {}: {}'.format(lineno, e)
            if lineno
            else 'Error: {}'.format(e))
def import_python(code, tmp_dir, lib_dir, user_filename=None):
    """Converts script_text into json and stores as new simulation.

    Avoids too much data back to the user in the event of an error.
    This could be a potential security issue, because the script
    could be used to probe the system.

    Args:
        simulation_type (str): always "srw", but used to find lib dir
        code (str): Python code that runs SRW
        user_filename (str): uploaded file name for log
    Returns:
        error: string containing error or None
        dict: simulation data
    """
    error = 'Import failed: error unknown'
    script = None
    try:
        with pkio.save_chdir(tmp_dir):
            # This string won't show up anywhere
            script = pkio.write_text('in.py', code)
            o = SRWParser(
                script,
                lib_dir=py.path.local(lib_dir),
                user_filename=user_filename,
            )
            return None, o.data
    except Exception as e:
        lineno = _find_line_in_trace(script) if script else None
        # Avoid
        pkdp(
            'Error: {}; exception={}; script={}; filename={}; stack:\n{}',
            error,
            e,
            script,
            user_filename,
            traceback.format_exc(),
        )
        # truncate so we do not leak too much detail to the user
        error = 'Error on line {}: {}'.format(lineno or '?', str(e)[:50])
        return error, None
def write_parameters(data, run_dir, is_parallel):
    """Generates and writes the python parameters file into run_dir."""
    content = _generate_parameters_file(data)
    pkio.write_text(
        run_dir.join(template_common.PARAMETERS_PYTHON_FILE),
        content,
    )
def test_1():
    """Creates a file in the work dir that must not end up in the sdist."""
    with pkunit.save_chdir_work():
        pkio.write_text('do_not_include_in_sdist.py', 'some text')