def _code(files=None):
    """Shared driver for sirepo.lib importer test cases.

    Globs data dirs named ``<prefix>_*`` where prefix is derived from the
    calling test function's name, parses each case's ``first*`` file,
    compares the parsed model to ``out.json``, writes output files to the
    work dir and compares them to the ``*.out`` expectations.

    Args:
        files (list): optional per-case lists of file names that must be a
            subset of the written output_files
    """
    from pykern import pkunit, pkio, pkjson
    from pykern.pkdebug import pkdp
    import inspect
    import sirepo.lib

    # e.g. caller "test_elegant" selects data dirs matching "elegant_*"
    prefix = inspect.stack()[1].function.split('_')[1]
    for case_index, case_dir in enumerate(
        pkio.sorted_glob(pkunit.data_dir().join(f'{prefix}_*')),
    ):
        sim_type = case_dir.basename.split('_')[0]
        parsed = sirepo.lib.Importer(sim_type).parse_file(
            pkio.sorted_glob(case_dir.join('first*'))[0],
        )
        cleaned = parsed.copy()
        cleaned.pkdel('version')
        # strip private SimData attributes before comparing
        for k in [k for k in cleaned.keys() if '_SimData__' in k]:
            cleaned.pkdel(k)
        pkunit.file_eq(case_dir.join('out.json'), cleaned)
        work = pkunit.work_dir().join(case_dir.basename)
        written = parsed.write_files(work)
        for expected in pkio.sorted_glob(
            pkunit.data_dir().join(case_dir.basename, '*.out'),
        ):
            pkunit.file_eq(
                expected,
                actual_path=work.join(expected.basename).new(ext=''),
            )
        if files:
            pkunit.pkok(
                set(files[case_index]).issubset(set(written.output_files)),
                'expecting files={} to be subset of output_files={}',
                files,
                written.output_files,
            )
def upgrade():
    """Upgrade the database

    Renames legacy warppba simulation names in every user's
    sirepo-data.json and bumps simulationSerial so clients reload.
    """
    from pykern import pkio
    from sirepo import simulation_db
    from sirepo import server
    import re

    def _inc(m):
        # bump the serial so connected clients pick up the rename
        return m.group(1) + str(int(m.group(2)) + 1)

    server.init()
    for d in pkio.sorted_glob(simulation_db.user_dir_name().join('*/warppba')):
        for fn in pkio.sorted_glob(d.join('*/sirepo-data.json')):
            with open(str(fn)) as f:
                t = f.read()
            for old, new in (
                ('"WARP example laser simulation"', '"Laser-Plasma Wakefield"'),
                ('"Laser Pulse"', '"Laser-Plasma Wakefield"'),
                ('"WARP example electron beam simulation"', '"Electron Beam"'),
            ):
                # idiom fix: "old not in t" instead of "not old in t"
                if old not in t:
                    continue
                t = t.replace(old, new)
                t = re.sub(r'(simulationSerial":\s+)(\d+)', _inc, t)
                break
            with open(str(fn), 'w') as f:
                f.write(t)
def resource_files():
    """Library shared between simulations of this type

    Returns:
        list: py.path.local objects
    """
    pattern = elegant_common.RESOURCE_DIR.join('*.sdds')
    return pkio.sorted_glob(pattern)
def create_examples():
    """Adds missing app examples to all users."""
    from pykern import pkio
    from sirepo import feature_config
    from sirepo import server
    from sirepo import simulation_db
    from sirepo import cookie

    server.init()
    for d in pkio.sorted_glob(simulation_db.user_dir_name('*')):
        if _is_src_dir(d):
            continue
        uid = simulation_db.uid_from_dir_name(d)
        cookie.init_mock(uid)
        for sim_type in feature_config.cfg.sim_types:
            simulation_db.verify_app_directory(sim_type)
            # Bug fix: in Python 3, map() returns a one-shot iterator, so
            # repeated "not in" membership tests below would exhaust it
            # after the first example and re-add every subsequent one.
            # Materialize the names as a set for correct O(1) lookups.
            names = set(
                x['name']
                for x in simulation_db.iterate_simulation_datafiles(
                    sim_type,
                    simulation_db.process_simulation_list,
                    {'simulation.isExample': True},
                )
            )
            for s in simulation_db.examples(sim_type):
                if s.models.simulation.name not in names:
                    simulation_db.save_new_example(s)
def _do(file_ext, parse):
    """Import every data file with extension ``file_ext`` and verify results.

    Args:
        file_ext (str): glob extension in the data dir
        parse (callable): returns (json, stream) for a data file
    """
    from pykern import pkio
    from pykern import pkunit
    from pykern import pkcollections
    from pykern.pkdebug import pkdp
    from pykern.pkunit import pkeq, pkfail, pkok
    from sirepo import srunit
    import re

    fc = srunit.flask_client(sim_types='srw:myapp')
    fc.sr_login_as_guest()
    # re-importing the same name gets " 2", " 3" appended
    for suffix in '', ' 2', ' 3':
        for path in pkio.sorted_glob(pkunit.data_dir().join('*.' + file_ext)):
            json, stream = parse(path)
            sim_type = re.search(r'^([a-z]+)_', path.basename).group(1)
            fc.sr_get_root(sim_type)
            is_dev = 'deviance' in path.basename
            if not is_dev:
                sim_name = pkcollections.json_load_any(json).models.simulation.name
            res = fc.sr_post_form(
                'importFile',
                {
                    'file': (stream, path.basename),
                    'folder': '/importer_test',
                },
                {'simulation_type': sim_type},
            )
            if is_dev:
                # deviance cases embed the expected error in the json
                m = re.search(r'Error: (.+)', json)
                if m:
                    pkeq(m.group(1), res.error)
                continue
            pkeq(sim_name + suffix, res.models.simulation.name)
def test_purge_users(monkeypatch):
    """purge_users removes only stale dirs of unregistered users."""
    from pykern.pkunit import pkeq, pkok
    from pykern.pkdebug import pkdp
    from pykern import pkio
    from pykern import pkconfig
    from sirepo import srunit
    srunit.init_auth_db(sim_types='myapp')
    from sirepo.pkcli import admin
    from sirepo import simulation_db
    from sirepo import auth_db
    import datetime

    pkeq(
        [],
        admin.purge_users(days=1, confirm=False),
        '{}: no old users so empty',
    )
    g = simulation_db.user_dir_name('*')
    dirs = list(pkio.sorted_glob(g))
    pkeq(1, len(dirs), '{}: expecting exactly one user dir', g)
    uid = dirs[0].basename
    #TODO(robnagler) really want the db to be created, but need
    # a test oauth class.
    monkeypatch.setattr(auth_db, 'all_uids', lambda: [uid])
    # age all files by two days so they look stale
    for f in pkio.walk_tree(dirs[0]):
        f.setmtime(f.mtime() - 86400 * 2)
    pkeq(
        [],
        admin.purge_users(days=1, confirm=False),
        '{}: all users registered so no deletes',
    )
    monkeypatch.setattr(auth_db, 'all_uids', lambda: [])
    res = admin.purge_users(days=1, confirm=False)
    pkeq(dirs, res, '{}: no users registered so one delete', res)
    pkok(dirs[0].check(dir=True), '{}: nothing deleted', res)
    res = admin.purge_users(days=1, confirm=True)
    pkeq(dirs, res, '{}: no users registered so one delete', res)
    pkok(not dirs[0].check(dir=True), '{}: directory deleted', res)
def test_importer(import_req):
    """Import each zgoubi *.dat file and compare generated source to *.txt."""
    from pykern import pkcollections
    from pykern import pkjson
    from pykern.pkunit import pkeq
    from sirepo.template import zgoubi
    import sirepo.sim_data

    with pkunit.save_chdir_work() as w:
        for fn in pkio.sorted_glob(pkunit.data_dir().join('*.dat')):
            error = None
            try:
                data = zgoubi.import_file(import_req(fn), unit_test_mode=True)
                sirepo.sim_data.get_class('zgoubi').fixup_old_data(data)
                #TODO(pjm): easier way to convert nested dict to pkcollections.Dict?
                data = pkcollections.json_load_any(pkjson.dump_pretty(data))
            except Exception as e:
                pkdlog(pkdexc())
                # Bug fix: BaseException.message was removed in Python 3;
                # str(e) is the portable way to get the message text.
                error = str(e)
            if error:
                actual = error
            else:
                actual = zgoubi.python_source_for_model(data)
            outfile = fn.basename + '.txt'
            pkio.write_text(outfile, actual)
            e = pkunit.data_dir().join(outfile)
            expect = pkio.read_text(e)
            pkeq(expect, actual, 'diff {} {}', e, w.join(outfile))
def _do(fc, file_ext, parse):
    """Import all data files of ``file_ext`` through ``fc`` and verify names.

    Args:
        fc: flask test client
        file_ext (str): extension to glob in the data dir
        parse (callable): returns bytes of the file's content
    """
    from pykern.pkcollections import PKDict
    from pykern import pkio, pkcompat
    from pykern import pkunit
    from pykern import pkcollections
    from pykern.pkdebug import pkdp, pkdlog
    from pykern.pkunit import pkeq, pkfail, pkok, pkre
    import re

    # python imports keep their name so only one pass; others get suffixes
    suffixes = ('',) if file_ext == 'py' else ('', ' 2', ' 3')
    for suffix in suffixes:
        for path in pkio.sorted_glob(pkunit.data_dir().join('*.' + file_ext)):
            pkdlog('file={}', path)
            json = pkcompat.from_bytes(parse(path))
            sim_type = re.search(r'^([a-z]+)_', path.basename).group(1)
            fc.sr_get_root(sim_type)
            is_dev = 'deviance' in path.basename
            res = fc.sr_post_form(
                'importFile',
                PKDict(folder='/importer_test'),
                PKDict(simulation_type=sim_type),
                file=path,
            )
            if is_dev:
                # deviance cases embed the expected error in the json
                m = re.search(r'Error: (.+)', json)
                if m:
                    pkre(m.group(1), res.error)
                continue
            if file_ext == 'py':
                sim_name = path.purebasename
            else:
                sim_name = pkcollections.json_load_any(json).models.simulation.name
            assert 'models' in res, f'file={path} res={res}'
            pkeq(sim_name + suffix, res.models.simulation.name)
def test_purge_users(monkeypatch):
    """purge_users removes only stale dirs of unregistered users."""
    from pykern.pkunit import pkeq, pkok
    from pykern.pkdebug import pkdp
    from pykern import pkio
    from pykern import pkconfig
    from sirepo import srunit
    srunit.init_user_db()
    from sirepo.pkcli import admin
    from sirepo import simulation_db
    from sirepo import api_auth
    import datetime

    pkeq(
        [],
        admin.purge_users(days=1, confirm=False),
        '{}: no old users so empty',
    )
    pkdp(simulation_db.user_dir_name('*'))
    g = simulation_db.user_dir_name('*')
    dirs = list(pkio.sorted_glob(g))
    pkeq(1, len(dirs), '{}: expecting exactly one user dir', g)
    uid = dirs[0].basename
    #TODO(robnagler) really want the db to be created, but need
    # a test oauth class.
    monkeypatch.setattr(api_auth, 'all_uids', lambda: [uid])
    # age all files by two days so they look stale
    for f in pkio.walk_tree(dirs[0]):
        f.setmtime(f.mtime() - 86400 * 2)
    pkeq(
        [],
        admin.purge_users(days=1, confirm=False),
        '{}: all users registered so no deletes',
    )
    monkeypatch.setattr(api_auth, 'all_uids', lambda: [])
    res = admin.purge_users(days=1, confirm=False)
    pkeq(dirs, res, '{}: no users registered so one delete', res)
    pkok(dirs[0].check(dir=True), '{}: nothing deleted', res)
    res = admin.purge_users(days=1, confirm=True)
    pkeq(dirs, res, '{}: no users registered so one delete', res)
    pkok(not dirs[0].check(dir=True), '{}: directory deleted', res)
def test_import():
    """Parse flash Config and flash.par files; compare to *.out files."""
    from pykern import pkjson
    from pykern.pkunit import pkeq
    from sirepo.template import flash_parser
    import re

    def _parse_config(fn):
        # Config files parse standalone
        return flash_parser.ConfigParser().parse(pkio.read_text(fn))

    def _parse_par(fn):
        # .par files need the matching sirepo-data.json for context
        data_file = fn.basename.replace('-flash.par', '')
        return flash_parser.ParameterParser().parse(
            pkjson.load_any(
                pkio.read_text(
                    pkunit.data_dir().join(f'{data_file}-sirepo-data.json'),
                ),
            ),
            pkio.read_text(fn),
        )

    with pkunit.save_chdir_work():
        for fn in pkio.sorted_glob(pkunit.data_dir().join('*')):
            if re.search(r'-Config$', fn.basename):
                parser = _parse_config
            elif re.search(r'flash.par$', fn.basename):
                parser = _parse_par
            else:
                continue
            try:
                actual = pkjson.dump_pretty(parser(fn))
            except Exception as e:
                pkdlog(pkdexc())
                actual = str(e)
            outfile = f'{fn.basename}.out'
            pkio.write_text(outfile, actual)
            pkeq(pkio.read_text(pkunit.data_dir().join(outfile)), actual)
def get_application_data(data):
    """Dispatch application data requests by method.

    Args:
        data (dict): request with 'method' and method-specific fields
    Returns:
        dict: method-specific response
    """
    if data['method'] == 'get_elegant_sim_list':
        res = []
        for f in pkio.sorted_glob(_elegant_dir().join('*/', _ELEGANT_TWISS_PATH)):
            m = re.match(r'.*?/elegant/(.*?)/animation', str(f))
            if not m:
                continue
            # renamed from "id", which shadowed the builtin id()
            sim_id = m.group(1)
            name = simulation_db.read_json(
                _elegant_dir().join(sim_id, '/', simulation_db.SIMULATION_DATA_FILE),
            ).models.simulation.name
            res.append({
                'simulationId': sim_id,
                'name': name,
            })
        return {
            'simList': res,
        }
    elif data['method'] == 'compute_particle_ranges':
        run_dir = simulation_db.simulation_run_dir({
            'simulationType': SIM_TYPE,
            'simulationId': data['simulationId'],
            'report': 'animation',
        })
        return {
            'fieldRange': _compute_range_across_files(run_dir),
        }
def _do(file_ext, parse):
    """Import every data file with extension ``file_ext`` and verify names.

    Args:
        file_ext (str): extension to glob in the data dir
        parse (callable): returns (json, stream) for a data file
    """
    from pykern import pkio
    from pykern import pkunit
    from pykern import pkcollections
    from pykern.pkdebug import pkdp
    from pykern.pkunit import pkeq, pkfail, pkok
    from sirepo import srunit
    import re

    fc = srunit.flask_client()
    # re-importing the same name gets " (2)", " (3)" appended
    for suffix in '', ' (2)', ' (3)':
        for path in pkio.sorted_glob(pkunit.data_dir().join('*.' + file_ext)):
            json, stream = parse(path)
            sim_type = re.search(r'^([a-z]+)_', path.basename).group(1)
            fc.get('/{}'.format(sim_type))
            is_dev = 'deviance' in path.basename
            if not is_dev:
                sim_name = pkcollections.json_load_any(json).models.simulation.name
            res = fc.sr_post_form(
                'importFile',
                {
                    'file': (stream, path.basename),
                    'folder': '/importer_test',
                },
                {'simulation_type': sim_type},
            )
            if is_dev:
                # deviance cases embed the expected error in the json
                m = re.search(r'Error: (.+)', json)
                if m:
                    pkeq(m.group(1), res.error)
                continue
            pkeq(sim_name + suffix, res.models.simulation.name)
def test_elegant_upload_sdds(fc):
    """Upload an sdds bunch file and verify it lands in the user lib dir."""
    from pykern import pkio
    from pykern import pkunit
    from pykern.pkcollections import PKDict
    from pykern.pkdebug import pkdp
    import sirepo.sim_data

    d = fc.sr_sim_data('Compact Storage Ring')
    fc.sr_post_form(
        'uploadFile',
        params=PKDict(
            simulation_type=fc.sr_sim_type,
            simulation_id=d.models.simulation.simulationId,
            file_type='bunchFile-sourceFile',
        ),
        data=PKDict(),
        # somename.bun was created with:
        # d.models.bunch.n_particles_per_bunch = 50
        file=pkunit.data_dir().join('somename.bun'),
    )
    import sirepo.srdb
    #TODO(robnagler) make easier to get at this in tests
    lib_files = pkio.sorted_glob(
        sirepo.srdb.root().join('user', fc.sr_uid, 'elegant', 'lib', '*'),
    )
    pkunit.pkeq(1, len(lib_files))
    pkunit.pkeq('bunchFile-sourceFile.somename.bun', lib_files[0].basename)
def __init__(self, *args, **kwargs):
    """Set up the directory layout and load/merge all zero yml files.

    Raises:
        Exception: re-raised after logging the yml file that failed to render
    """
    super(T, self).__init__(*args, **kwargs)
    self.root_d = pkio.py_path(cfg.root_d)
    self.db_d = self.root_d.join(DB_SUBDIR)
    self.proprietary_source_d = self.root_d.join(PROPRIETARY_SUBDIR)
    self.rpm_source_d = self.root_d.join(RPM_SUBDIR)
    self.tmp_d = self.root_d.join(TMP_SUBDIR)
    self.secret_d = self.db_d.join(SECRET_SUBDIR)
    self.srv_d = self.root_d.join(SRV_SUBDIR)
    self.srv_host_d = self.srv_d.join(HOST_SUBDIR)
    self.base = PKDict()
    # track the file being rendered so the error log can name it
    f = None
    try:
        for search_d in self.db_d, self.secret_d:
            for f in pkio.sorted_glob(search_d.join(ZERO_YML)):
                parsed = pkyaml.load_str(
                    pkjinja.render_file(f, self.base, strict_undefined=True),
                )
                merge_dict(self.base, parsed)
    except Exception:
        pkdlog('error rendering db={}', f)
        raise
def test_importer(import_req):
    """Run the elegant importer over data files; compare to *.txt outputs."""
    from pykern.pkcollections import PKDict
    from sirepo.template import elegant
    import sirepo.lib

    for fn in pkio.sorted_glob(pkunit.data_dir().join('*')):
        if not pkio.has_file_extension(fn, ('ele', 'lte')) \
           or fn.basename.endswith('.ele.lte'):
            continue
        kwargs = PKDict()
        pkdlog('file={}', fn)
        if fn.basename.startswith('deviance-'):
            # deviant inputs must raise; the message is the expected output
            try:
                data = elegant.import_file(import_req(fn))
            except Exception as e:
                kwargs.actual = f'{e}\n'
            else:
                kwargs.actual = 'did not raise exception'
        elif fn.ext == '.lte':
            data = elegant.import_file(import_req(fn))
            data['models']['commands'] = []
            gen = elegant._Generate(data)
            gen.sim()
            env = gen.jinja_env
            kwargs.actual = env.rpn_variables + env.lattice
        else:
            written = sirepo.lib.Importer('elegant').parse_file(fn).write_files(
                pkunit.work_dir(),
            )
            kwargs.actual_path = written.commands
        pkunit.file_eq(fn.basename + '.txt', **kwargs)
def copy_related_files(data, source_path, target_path):
    """Copy results and log for the long-running simulations.

    Args:
        data: simulation data (unused here, kept for interface compatibility)
        source_path (str): simulation source directory
        target_path (str): simulation target directory
    """
    for m in ('animation', ):
        # copy any simulation output
        s = pkio.py_path(source_path).join(m)
        if not s.exists():
            continue
        t = pkio.py_path(target_path).join(m)
        pkio.mkdir_parent(str(t))
        # Bug fix: glob inside the source animation dir; the previous bare
        # '*' pattern globbed the process's current working directory, so
        # the animation output was never copied.
        for f in pkio.sorted_glob(s.join('*')):
            f.copy(t)
def test_elegant():
    """Parse each elegant data dir, compare models and written output files."""
    from pykern.pkdebug import pkdp
    from pykern import pkunit, pkio, pkjson
    import sirepo.lib
    # removed unused "import shutil"

    for s in pkio.sorted_glob(pkunit.data_dir().join('*')):
        t = s.basename.split('_')[0]
        d = sirepo.lib.Importer(t).parse_file(
            pkio.sorted_glob(s.join('first*'))[0],
        )
        d2 = d.copy()
        d2.pkdel('version')
        # strip private SimData attributes before comparing
        for k in [k for k in d2.keys() if '_SimData__' in k]:
            d2.pkdel(k)
        pkunit.file_eq(s.join('out.json'), d2)
        w = pkunit.work_dir().join(s.basename)
        r = d.write_files(w)
        #TODO(robnagler) may not exist in all cases
        pkunit.pkeq('run_setup.acceptance.sdds', r.output_files[0])
        for o in pkio.sorted_glob(pkunit.data_dir().join(s.basename, '*.out')):
            pkunit.file_eq(o, actual_path=w.join(o.basename).new(ext=''))
def test_remove_srw_report_dir(fc):
    """Report re-runs cleanly after its run directory is removed."""
    from pykern import pkio
    from pykern import pkunit
    import sirepo.srdb

    report = 'intensityReport'
    data = fc.sr_sim_data('NSLS-II ESM beamline')
    fc.sr_run_sim(data, report)
    run_dirs = pkio.sorted_glob(
        sirepo.srdb.root().join('user', fc.sr_uid, 'srw', '*', report),
    )
    pkunit.pkeq(1, len(run_dirs))
    pkio.unchecked_remove(*run_dirs)
    # must succeed again from scratch after removal
    fc.sr_run_sim(data, report)
def test_generate_python():
    """Regenerate parameters files and compare to checked-in text."""
    from pykern import pkio
    from pykern import pkunit
    from sirepo.template import synergia
    import re

    with pkunit.save_chdir_work():
        for f in pkio.sorted_glob(pkunit.data_dir().join('*.txt')):
            expect = f.read()
            # the first comment line names the example simulation
            m = re.search(r'^#\s*(.*\S)\s*$', expect, flags=re.MULTILINE)
            assert m
            actual = synergia._generate_parameters_file(
                _example_data(m.group(1)),
            )
            pkio.write_text(f.basename, actual)
            pkunit.pkeq(expect, actual)
def test_generate_python():
    """Regenerate parameters files and compare with pkunit.file_eq."""
    from pykern import pkio
    from pykern import pkunit
    from sirepo.template import synergia
    import re

    # removed the unused "as d" binding on save_chdir_work()
    with pkunit.save_chdir_work():
        for f in pkio.sorted_glob(pkunit.data_dir().join('*.txt')):
            e = pkio.read_text(f)
            # the first comment line names the example simulation
            m = re.search(r'^#\s*(.*\S)\s*$', e, flags=re.MULTILINE)
            assert m
            pkunit.file_eq(
                f,
                synergia._generate_parameters_file(_example_data(m.group(1))),
            )
def _import(fc):
    """Import every zip in the data dir; return (type, name, expect) tuples.

    Args:
        fc: flask test client
    Returns:
        list: (simulationType, simulation name, expected file list) per zip
    """
    from pykern import pkio
    from pykern import pkunit
    import zipfile

    res = []
    for f in pkio.sorted_glob(pkunit.data_dir().join('*.zip')):
        with zipfile.ZipFile(str(f)) as z:
            # run.py is generated on import in addition to the zip contents
            expect = sorted(z.namelist() + ['run.py'])
        # Bug fix: the upload file handle was previously opened inline and
        # never closed; use a context manager so it is released promptly.
        with open(str(f), 'rb') as fh:
            d = fc.sr_post_form(
                'importFile',
                {
                    'file': (fh, f.basename),
                    'folder': '/exporter_test',
                },
            )
        res.append((d.simulationType, d.models.simulation.name, expect))
    return res
def test_importer():
    """Import each elegant file and compare generated source to *.txt."""
    from pykern import pkcollections
    from pykern import pkio
    from pykern.pkunit import pkeq
    from sirepo.template import elegant

    with pkunit.save_chdir_work():
        for fn in pkio.sorted_glob(pkunit.data_dir().join('*')):
            if not pkio.has_file_extension(fn, ('ele', 'lte')) \
               or fn.basename.endswith('ele.lte'):
                continue
            error = None
            try:
                data = elegant.import_file(FlaskRequest(fn))
            except Exception as e:
                pkdlog(pkdexc())
                # Bug fix: BaseException.message was removed in Python 3;
                # str(e) is the portable way to get the message text.
                error = str(e)
            if error:
                actual = error
            else:
                if pkio.has_file_extension(fn, 'lte'):
                    data['models']['commands'] = []
                    actual = '{}{}'.format(
                        elegant._generate_variables(data),
                        elegant.generate_lattice(
                            data,
                            elegant._build_filename_map(data),
                            elegant._build_beamline_map(data),
                            pkcollections.Dict(),
                        ),
                    )
                else:
                    data2 = elegant.import_file(
                        FlaskRequest('{}.lte'.format(fn)),
                        test_data=data,
                    )
                    actual = elegant._generate_commands(
                        data2,
                        elegant._build_filename_map(data2),
                        elegant._build_beamline_map(data2),
                        pkcollections.Dict(),
                    )
            outfile = fn.basename + '.txt'
            pkio.write_text(outfile, actual)
            expect = pkio.read_text(pkunit.data_dir().join(outfile))
            #TODO(pjm): this takes too long if there are a lot of diffs
            #assert expect == actual
            pkeq(expect, actual)
def find_global_simulation(sim_type, sid, checked=False):
    """Find the single user directory containing simulation ``sid``.

    Args:
        sim_type (str): simulation type code
        sid (str): simulation id
        checked (bool): raise not-found instead of returning None [False]
    Returns:
        str: path to the simulation dir, or None if absent and not checked
    """
    paths = pkio.sorted_glob(user_dir_name().join('*', sim_type, sid))
    if len(paths) == 1:
        return str(paths[0])
    if not paths:
        if checked:
            util.raise_not_found(
                '{}/{}: global simulation not found',
                sim_type,
                sid,
            )
        return None
    # more than one match is always an error
    util.raise_not_found(
        '{}: more than one path found for simulation={}/{}',
        paths,
        sim_type,
        sid,
    )
def get_application_data(data, **kwargs):
    """Dispatch application data requests by method.

    Args:
        data (PKDict): request with ``method`` and method-specific fields
    Returns:
        PKDict: method-specific response
    """
    if data.method == 'get_madx_sim_list':
        sims = []
        for f in pkio.sorted_glob(
            _SIM_DATA.controls_madx_dir().join(
                '*',
                sirepo.simulation_db.SIMULATION_DATA_FILE,
            ),
        ):
            m = sirepo.simulation_db.read_json(f).models
            # a sim is usable only if it has a beamline with kickers
            if _has_kickers(m):
                invalid_msg = None
            elif not _has_beamline(m):
                invalid_msg = 'No beamlines'
            else:
                invalid_msg = 'No kickers'
            sims.append(
                PKDict(
                    name=m.simulation.name,
                    simulationId=m.simulation.simulationId,
                    invalidMsg=invalid_msg,
                ),
            )
        return PKDict(simList=sims)
    elif data.method == 'get_external_lattice':
        return _get_external_lattice(data.simulationId)
def get_application_data(data):
    """Dispatch application data requests by method.

    Args:
        data (dict): request with 'method' and method-specific fields
    Returns:
        dict: method-specific response
    Raises:
        AssertionError: unknown method
    """
    if data['method'] == 'get_elegant_sim_list':
        res = []
        for f in pkio.sorted_glob(_elegant_dir().join('*/', _ELEGANT_TWISS_PATH)):
            m = re.match(r'.*?/elegant/(.*?)/animation', str(f))
            if not m:
                continue
            # renamed from "id", which shadowed the builtin id()
            sim_id = m.group(1)
            name = simulation_db.read_json(
                _elegant_dir().join(sim_id, '/', simulation_db.SIMULATION_DATA_FILE),
            ).models.simulation.name
            res.append({
                'simulationId': sim_id,
                'name': name,
            })
        return {
            'simList': res,
        }
    elif data['method'] == 'compute_particle_ranges':
        return template_common.compute_field_range(data, _compute_range_across_files)
    # raise explicitly (same exception type) so the check survives python -O,
    # which strips assert statements
    raise AssertionError(
        'unknown application data method: {}'.format(data['method']),
    )
def test_walk_tree_and_sorted_glob():
    """Looks in work_dir"""
    from pykern import pkunit
    from pykern import pkio

    # removed the unused "as pwd" binding on save_chdir_work()
    with pkunit.save_chdir_work():
        for f in ('d1/d7', 'd2/d3', 'd4/d5/d6'):
            pkio.mkdir_parent(f)
        expect = []
        for f in ['d1/d7/f1', 'd4/d5/f2', 'd2/d3/f3']:
            pkio.write_text(f, '')
            expect.append(py.path.local(f))
        assert sorted(expect) == list(pkio.walk_tree('.')), \
            'When walking tree, should only return files'
        assert [expect[2]] == list(pkio.walk_tree('.', 'f3')), \
            'When walking tree with file_re, should only return matching files'
        assert [expect[0]] == list(pkio.walk_tree('.', '^d1')), \
            'When walking tree with file_re, file to match does not include dir being searched'
        assert pkio.sorted_glob('*[42]') == [py.path.local(f) for f in ('d2', 'd4')]
def test_importer(import_req):
    """Import each elegant file and compare generated source to *.txt."""
    from pykern.pkcollections import PKDict
    from pykern.pkunit import pkeq
    from sirepo.template import lattice
    from sirepo.template import elegant
    # removed unused imports: sirepo.util, flask

    with pkunit.save_chdir_work():
        for fn in pkio.sorted_glob(pkunit.data_dir().join('*')):
            if not pkio.has_file_extension(fn, ('ele', 'lte')) \
               or fn.basename.endswith('ele.lte'):
                continue
            error = None
            try:
                data = elegant.import_file(import_req(fn))
            except Exception as e:
                pkdlog(pkdexc())
                error = str(e)
            if error:
                actual = error
            else:
                if pkio.has_file_extension(fn, 'lte'):
                    data['models']['commands'] = []
                    actual = '{}{}'.format(
                        elegant._generate_variables(data),
                        elegant._generate_lattice(
                            elegant._build_filename_map(data),
                            lattice.LatticeUtil(data, elegant._SCHEMA),
                        ),
                    )
                else:
                    #TODO(robnagler) test simulationId
                    data2 = elegant.import_file(
                        import_req(fn.new(ext='ele.lte')),
                        test_data=data,
                    )
                    actual = elegant._generate_commands(
                        elegant._build_filename_map(data2),
                        lattice.LatticeUtil(data2, elegant._SCHEMA),
                    )
            outfile = fn.basename + '.txt'
            pkio.write_text(outfile, actual)
            expect = pkio.read_text(pkunit.data_dir().join(outfile))
            pkeq(expect, actual)
def purge_users(days=180, confirm=False):
    """Remove old users from db which have not registered.

    Args:
        days (int): maximum days of untouched files (old is mtime > days)
        confirm (bool): delete the directories if True (else don't delete) [False]
    Returns:
        list: directories removed (or to remove if confirm)
    """
    from pykern import pkio
    from sirepo import server
    from sirepo import simulation_db
    from sirepo import api_auth
    import datetime

    days = int(days)
    # Bug fix: the assert message contained an unfilled '{}' placeholder;
    # format in the offending value.
    assert days >= 1, \
        '{}: days must be a positive integer'.format(days)
    server.init()
    uids = api_auth.all_uids()
    now = datetime.datetime.utcnow()
    to_remove = []
    for d in pkio.sorted_glob(simulation_db.user_dir_name('*')):
        #TODO(pjm): need to skip special "src" user
        if _is_src_dir(d):
            continue
        if simulation_db.uid_from_dir_name(d) in uids:
            continue
        for f in pkio.walk_tree(d):
            # any file touched within the window keeps the whole dir
            if (now - now.fromtimestamp(f.mtime())).days <= days:
                break
        else:
            to_remove.append(d)
    if confirm:
        pkio.unchecked_remove(*to_remove)
    return to_remove
def _purge(self):
    """Remove date directories whose mtime is older than cfg.keep_days."""
    cutoff = datetime.datetime.utcnow() - cfg.keep_days
    # date dirs are fixed-width, all-digit names
    for d in pkio.sorted_glob('[0-9]' * len(self._date_d)):
        if datetime.datetime.utcfromtimestamp(d.stat().mtime) < cutoff:
            pkio.unchecked_remove(d)
def resource_files():
    """Library shared between simulations of this type

    Returns:
        list: py.path.local objects
    """
    pattern = _RESOURCE_DIR.join('*.txt')
    return pkio.sorted_glob(pattern)
def resource_files():
    """Library shared between simulations of this type

    Returns:
        list: py.path.local objects
    """
    pattern = RESOURCE_DIR.join('beamlist*.txt')
    return pkio.sorted_glob(pattern)