def test_checked_call():
    """Run p1.py with each scenario arg and verify the process exit code.

    MPI scenarios (names containing 'rank') are launched under mpiexec.
    """
    from pykern import pkunit
    from pykern.pkunit import pkeq
    import sys
    import subprocess

    with pkunit.save_chdir_work():
        cmd = [sys.executable, str(pkunit.data_dir().join('p1.py'))]
        for i, a in enumerate((
            ('normal', 0),
            ('exit-1', 1),
            ('divide-zero', 1),
            ('normal-rank-all', 0),
            ('divide-zero-rank-2', 86),
            ('exit-13-rank-0', 13),
        )):
            f = '{}.out'.format(i)
            with open(f, 'w') as o:
                c = cmd + [a[0]]
                if 'rank' in a[0]:
                    # rank-specific cases need multiple MPI ranks
                    c = ['mpiexec', '-n', '4'] + c
                actual = subprocess.call(
                    c,
                    stdout=o,
                    stderr=subprocess.STDOUT,
                )
            # read output after the writer is closed; also avoids leaking
            # the file handle the original open(f).read() left open
            with open(f) as out:
                output = out.read()
            pkeq(a[1], actual, '{}: exit({})\n{}', ' '.join(c), actual, output)
def _do(file_ext, parse):
    """Import every data-dir file with *file_ext* three times and check names.

    Args:
        file_ext (str): extension glob to import
        parse (callable): returns (json_text, stream) for a file
    """
    from pykern import pkio
    from pykern import pkunit
    from pykern import pkcollections
    from pykern.pkdebug import pkdp
    from pykern.pkunit import pkeq, pkfail, pkok
    from sirepo import srunit
    import re

    fc = srunit.flask_client()
    for suffix in ('', ' (2)', ' (3)'):
        for path in pkio.sorted_glob(pkunit.data_dir().join('*.' + file_ext)):
            text, stream = parse(path)
            sim_type = re.search(r'^([a-z]+)_', path.basename).group(1)
            fc.get('/{}'.format(sim_type))
            is_dev = 'deviance' in path.basename
            sim_name = None
            if not is_dev:
                sim_name = pkcollections.json_load_any(text).models.simulation.name
            res = fc.sr_post_form(
                'importFile',
                {
                    'file': (stream, path.basename),
                    'folder': '/importer_test',
                },
                {'simulation_type': sim_type},
            )
            if is_dev:
                # deviance cases encode the expected error in the file itself
                m = re.search(r'Error: (.+)', text)
                if m:
                    pkeq(m.group(1), res.error)
                continue
            # repeated imports get a numbered suffix appended to the name
            pkeq(sim_name + suffix, res.models.simulation.name)
def test_purge_users(monkeypatch):
    """purge_users only deletes stale, unregistered user dirs, and only with confirm."""
    from pykern.pkunit import pkeq, pkok
    from pykern import pkio
    from sirepo import srunit

    srunit.init_user_db()
    from sirepo.pkcli import admin
    from sirepo import simulation_db
    from sirepo import api_auth

    res = admin.purge_users(days=1, confirm=False)
    pkeq([], res, '{}: no old users so empty')
    g = simulation_db.user_dir_name('*')
    dirs = list(pkio.sorted_glob(g))
    pkeq(1, len(dirs), '{}: expecting exactly one user dir', g)
    uid = dirs[0].basename
    #TODO(robnagler) really want the db to be created, but need
    # a test oauth class.
    monkeypatch.setattr(api_auth, 'all_uids', lambda: [uid])
    for f in pkio.walk_tree(dirs[0]):
        # age every file past the one-day purge threshold
        f.setmtime(f.mtime() - 86400 * 2)
    res = admin.purge_users(days=1, confirm=False)
    pkeq([], res, '{}: all users registered so no deletes')
    monkeypatch.setattr(api_auth, 'all_uids', lambda: [])
    res = admin.purge_users(days=1, confirm=False)
    pkeq(dirs, res, '{}: no users registered so one delete', res)
    pkok(dirs[0].check(dir=True), '{}: nothing deleted', res)
    res = admin.purge_users(days=1, confirm=True)
    pkeq(dirs, res, '{}: no users registered so one delete', res)
    pkok(not dirs[0].check(dir=True), '{}: directory deleted', res)
def test_py_path():
    """pkio.py_path() of an existing directory compares equal to the original."""
    from pykern import pkunit
    from pykern import pkio
    from pykern.pkunit import pkeq

    with pkunit.save_chdir_work():
        data_d = pkunit.data_dir()
        pkeq(data_d, pkio.py_path(data_d))
def test_json_load_any():
    """Validate json_load_any()"""
    import json

    serialized = json.dumps({'a': 33})
    loaded = pkcollections.json_load_any(serialized)
    pkeq(
        33,
        loaded.a,
        '{}: j2.a is not 33',
        loaded.a,
    )
    # a key that collides with a dict attribute ("values") must still parse
    serialized = json.dumps({'a': 33, 'b': {'values': 'will collide, but ok'}})
    loaded = pkcollections.json_load_any(serialized)
    # smoke test: an explicit object_pairs_hook is also accepted
    pkcollections.json_load_any(serialized, object_pairs_hook=pkcollections.Dict)
def test_generate_python():
    """Shadow parameter files for example sims match the stored expectations."""
    from pykern import pkio
    from pykern import pkunit
    from pykern.pkunit import pkeq
    from sirepo.template import shadow

    with pkunit.save_chdir_work():
        for name in ('Complete Beamline', 'Wiggler'):
            data = _example_data(name)
            # report on the last beamline element
            data['report'] = 'watchpointReport{}'.format(data.models.beamline[-1].id)
            actual = shadow._generate_parameters_file(data)
            outfile = data.models.simulation.simulationId + '.txt'
            pkio.write_text(outfile, actual)
            expect = pkio.read_text(pkunit.data_dir().join(outfile))
            pkeq(expect, actual)
def test_importer():
    """Round-trip .ele/.lte imports through elegant and diff generated source."""
    from pykern import pkcollections
    from pykern import pkio
    from pykern import pkunit
    from pykern.pkdebug import pkdexc, pkdlog
    from pykern.pkunit import pkeq
    from sirepo.template import elegant

    with pkunit.save_chdir_work():
        for fn in pkio.sorted_glob(pkunit.data_dir().join('*')):
            if not pkio.has_file_extension(fn, ('ele', 'lte')) \
                or fn.basename.endswith('ele.lte'):
                continue
            error = None
            try:
                data = elegant.import_file(FlaskRequest(fn))
            except Exception as e:
                pkdlog(pkdexc())
                # str(e), not e.message: the message attribute was removed in Python 3
                error = str(e)
            if error:
                actual = error
            else:
                if pkio.has_file_extension(fn, 'lte'):
                    data['models']['commands'] = []
                    actual = '{}{}'.format(
                        elegant._generate_variables(data),
                        elegant.generate_lattice(
                            data,
                            elegant._build_filename_map(data),
                            elegant._build_beamline_map(data),
                            pkcollections.Dict(),
                        ),
                    )
                else:
                    data2 = elegant.import_file(FlaskRequest('{}.lte'.format(fn)), test_data=data)
                    actual = elegant._generate_commands(
                        data2,
                        elegant._build_filename_map(data2),
                        elegant._build_beamline_map(data2),
                        pkcollections.Dict(),
                    )
            outfile = fn.basename + '.txt'
            pkio.write_text(outfile, actual)
            expect = pkio.read_text(pkunit.data_dir().join(outfile))
            #TODO(pjm): this takes too long if there are a lot of diffs
            #assert expect == actual
            pkeq(expect, actual)
def test_srw():
    """srw smoke test: html root, listSimulations, find-by-name space encodings."""
    from pykern.pkunit import pkeq
    from sirepo import srunit

    fc = srunit.flask_client()
    resp = fc.get('/srw')
    assert '<!DOCTYPE html' in resp.get_data(), \
        'Top level document is html'
    # exercise listSimulations; the response content is not checked here
    fc.sr_post(
        'listSimulations',
        {'simulationType': 'srw', 'search': ''},
    )
    pkeq(fc.get('/find-by-name/srw/default/UndulatorRadiation').status_code, 404)
    # all three space encodings must resolve to the same simulation
    for sep in (' ', '%20', '+'):
        pkeq(fc.get('/find-by-name/srw/default/Undulator{}Radiation'.format(sep)).status_code, 200)
def test_in_request(op, cfg=None, before_request=None, headers=None, want_cookie=True):
    """Execute *op* inside a server request and assert a 200 response.

    Returns:
        Response: the flask test-client response
    """
    from sirepo import uri_router

    fc = flask_client(cfg)
    try:
        if before_request:
            before_request(fc)
        # stash the operation on the app so the srunit route can run it
        setattr(
            server.app,
            server.SRUNIT_TEST_IN_REQUEST,
            pkcollections.Dict(op=op, want_cookie=want_cookie),
        )
        resp = fc.get(uri_router.srunit_uri, headers=headers)
        pkunit.pkeq(200, resp.status_code, 'FAIL: resp={}', resp.status)
    finally:
        delattr(server.app, server.SRUNIT_TEST_IN_REQUEST)
    return resp
def test_add_code():
    """add_code() writes the user manifest, keyed by pyenv then code name."""
    from pykern import pkio
    from pykern import pkjson
    from pykern import pkunit
    from pykern.pkunit import pkok, pkeq, pkre
    from pykern.pkdebug import pkdp
    from pykern.pkcli import rsmanifest
    import re

    with pkunit.save_chdir_work(is_pkunit_prefix=True) as d:
        rsmanifest.add_code('A', 'b', 'c', 'd', pyenv='v')
        manifest = pkjson.load_any(pkio.py_path(rsmanifest.USER_FILE).read())
        pkok(20170101.0 < float(manifest.version), 'version must be after 2017')
        # code names are lower-cased on storage
        pkeq('A', manifest.codes.v.a.name)
        pkeq('b', manifest.codes.v.a.version)
        rsmanifest.add_code('a', 'bb', 'cc', 'dd')
        manifest = pkjson.load_any(pkio.expand_user_path(rsmanifest.USER_FILE).read())
        pkeq('A', manifest.codes.v.a.name)
        # no pyenv means the empty-string bucket
        pkeq('a', manifest.codes[''].a.name)
        pkeq('bb', manifest.codes[''].a.version)
        pkre('20.*T.*Z', manifest.codes[''].a.installed)
def test_extract_tfs_pages():
    """parse_tfs_file() header, per-page info, single-page and full reads."""
    from pykern import pkunit
    from pykern.pkunit import pkeq
    from sirepo.template import madx_parser

    path = pkunit.data_dir().join('ptc_track.file.tfs')
    header = madx_parser.parse_tfs_file(path, header_only=True)
    pkeq(header, ['number', 'turn', 'x', 'px', 'y', 'py', 't', 'pt', 's', 'e'])
    info = madx_parser.parse_tfs_page_info(path)
    pkeq(
        info[3],
        dict(
            name='BPMY1',
            turn='1',
            s='2.72',
        ),
    )
    res = madx_parser.parse_tfs_file(path, want_page=3)
    pkeq(len(res.s), 5)
    # a page's first s value must match its page-info entry
    pkeq(res.s[0], info[3].s)
    res = madx_parser.parse_tfs_file(path)
    pkeq(len(res.s), 75)
def test_read_all():
    """read_all() merges the user manifest with the container manifest."""
    from pykern import pkio
    from pykern import pkjson
    from pykern import pkunit
    from pykern.pkunit import pkok, pkeq, pkre
    from pykern.pkdebug import pkdp
    from pykern.pkcli import rsmanifest
    import re

    with pkunit.save_chdir_work(is_pkunit_prefix=True) as d:
        rsmanifest.add_code(
            'code1',
            version='1.1',
            uri='http://x.com',
            source_d='/tmp',
            pyenv='py2',
        )
        version = pkjson.load_any(pkio.py_path(rsmanifest.USER_FILE)).version
        # fabricate a container manifest alongside the user one
        pkjson.dump_pretty(
            {'version': version, 'image': {'type': 'docker'}},
            filename=rsmanifest.CONTAINER_FILE,
        )
        merged = rsmanifest.read_all()
        pkeq(version, merged.version)
        pkeq('docker', merged.image.type)
        pkeq('1.1', merged.codes.py2.code1.version)
def test_2_purge_users_guests_present(auth_fc):
    """purge_guest_users removes an expired guest's dir and db registration."""
    from sirepo import auth_db
    from pykern.pkunit import pkeq, pkok
    from sirepo.pkcli import admin
    from sirepo import srtime

    if not auth_fc.sr_uid:
        auth_fc.sr_login_as_guest()
    days = 1
    adjusted_time = days + 10
    dirs_in_fs = _get_dirs()
    uids_in_db = auth_db.UserRegistration.search_all_for_column('uid')
    dirs_and_uids = {dirs_in_fs[0]: uids_in_db[0]}
    # move the clock past the purge window
    srtime.adjust_time(adjusted_time)
    res = admin.purge_guest_users(days=days, confirm=False)
    pkeq(dirs_and_uids, res, '{}: one guest user so one dir and uid to delete', res)
    res = admin.purge_guest_users(days=days, confirm=True)
    pkeq(dirs_and_uids, res, '{}: one guest user so one dir and uid to delete', res)
    # dict views are not indexable in Python 3; materialize before subscripting
    purged_dir = list(res.keys())[0]
    purged_uid = list(res.values())[0]
    pkok(not purged_dir.check(dir=True), '{}: directory deleted', res)
    pkeq(auth_db.UserRegistration.search_by(uid=purged_uid), None, '{}: expecting uid to deleted from db', res)
def test_flash_change_role_change_lib_files(auth_fc):
    """Granting/revoking the flash role toggles the proprietary lib file."""
    from pykern import pkio
    from pykern import pkunit
    import sirepo.auth
    import sirepo.auth_db
    import sirepo.pkcli.roles
    import sirepo.srdb

    def _change_role(add=True):
        # pick the role operation by direction
        op = sirepo.pkcli.roles.add_roles if add else sirepo.pkcli.roles.delete_roles
        op(
            fc.sr_auth_state().uid,
            sirepo.auth.role_for_sim_type(fc.sr_sim_type),
        )

    def _check_file(exists=True):
        # the proprietary file appears in the user dir iff the role is held
        pkunit.pkeq(
            [_proprietary_file] if exists else [],
            [
                x.basename
                for x in pkio.walk_tree(fc.sr_user_dir(), _proprietary_file)
            ],
        )

    pkunit.data_dir().join('db').copy(sirepo.srdb.root())
    _proprietary_file = 'flash.rpm'
    fc = auth_fc
    fc.sr_email_register('[email protected]', sim_type='flash')
    r = fc.sr_post('listSimulations', {'simulationType': fc.sr_sim_type}, raw_response=True)
    # no role yet: access is forbidden
    pkunit.pkeq(403, r.status_code)
    _check_file(exists=False)
    _change_role(add=True)
    _check_file(exists=True)
    _change_role(add=False)
    _check_file(exists=False)
def test_init_and_run(monkeypatch):
    """sim init/run round trip against the git-backed sim service (skipped without auth cfg)."""
    from pykern import pkio
    from pykern import pkunit
    from pykern.pkcli import sim
    from pykern.pkcli import rsmanifest
    import netrc
    import os
    import re
    import subprocess

    #TODO(robnagler) broken: "aux" removed, need another way
    cfg = pkunit.cfg.aux.get('sim_test', None)
    if not cfg:
        # No testing if there's no auth config
        return
    u, p = cfg.split(' ')
    monkeypatch.setattr(netrc, 'netrc', _netrc)
    _netrc.result = (u, None, p)
    with pkunit.save_chdir_work(is_pkunit_prefix=True):
        f = 'out/log'
        expect_code = pkunit.random_alpha()
        pkio.write_text('run.sh', 'echo {}>{}'.format(expect_code, f))
        rsmanifest.pkunit_setup()
        sim._cmd_init()
        sim._cmd_run()
        # check_output returns bytes; decode so the str regex can match
        x = subprocess.check_output(['git', 'remote', '-v']).decode()
        m = re.search(r'/(sim-sim_work-\d+-\d+)\.git', x)
        # assert the match before dereferencing it
        pkunit.pkok(m, 'git remote: failed: {}', x)
        repo = m.group(1)
        pkunit.pkeq(expect_code, pkio.read_text('out/log').rstrip())
        os.remove('run.sh')
        sim._cmd_pip('djson')
        pkio.write_text('run.py', 'import djson')
        sim._cmd_run()
        sim._git_api_request(
            'delete',
            'repositories/{user}/{repo}',
            dict(repo=repo),
        )
def test_auth_login():
    """blueskyAuth with a hashed request authenticates and returns sim data."""
    from pykern import pkcollections
    from pykern.pkunit import pkeq
    from sirepo import srunit

    fc = srunit.flask_client({
        'SIREPO_FEATURE_CONFIG_API_MODULES': 'bluesky',
        'SIREPO_BLUESKY_AUTH_SECRET': '3SExmbOzn1WeoCWeJxekaE6bMDUj034Pu5az1hLNnvENyvL1FAJ1q3eowwODoa3f',
    })
    from sirepo import simulation_db
    from sirepo import bluesky

    fc.get('/srw')
    data = fc.sr_post(
        'listSimulations',
        {'simulationType': 'srw', 'search': {'simulationName': 'Bending Magnet Radiation'}},
    )
    # drop the session so blueskyAuth must authenticate on its own
    fc.cookie_jar.clear()
    data = data[0].simulation
    req = pkcollections.Dict(
        simulationType='srw',
        simulationId=data.simulationId,
    )
    bluesky.auth_hash(req)
    resp = fc.sr_post('blueskyAuth', req)
    pkeq('ok', resp['state'])
    pkeq(req.simulationId, simulation_db.parse_sid(resp['data']))
    pkeq('srw', resp['schema']['simulationType'])
def test_uniquify_beamline():
    """uniquify_elements() yields one beamline of unique items that still
    reflect the original sub-line structure."""
    from pykern import pkio
    from pykern import pkunit
    from pykern.pkunit import pkeq
    from pykern import pkjson
    from sirepo.template import madx

    d = pkjson.load_any(pkunit.data_dir().join('in.json'))
    madx.uniquify_elements(d)
    pkeq(1, len(d.models.beamlines), 'expecting one beamline={}', d.models.beamlines)
    items = d.models.beamlines[0]['items']
    # no element id may appear twice after uniquification
    pkeq(len(list(set(items))), len(items), 'expecting all unique items={}', items)
    id_to_original = {el._id: el.original_id for el in d.models.elements}
    originals = [id_to_original[i] for i in items]
    pkeq(
        [2, 2, 5, 5, 5, 2, 2, 5, 5, 5, 2],
        originals,
        'expecting proper reflection of sub-lines. ids of original elements: {}',
        originals,
    )
def test_warpvnd_import(fc):
    """Upload an STL lib file and verify it attaches to the new simulation."""
    from pykern import pkunit
    from pykern.pkcollections import PKDict
    from pykern.pkdebug import pkdp
    from pykern.pkunit import pkre, pkeq
    import sirepo.sim_data

    sim = fc.sr_post(
        'newSimulation',
        PKDict(name='new1', folder='/', simulationType=fc.sr_sim_type),
    )
    sim_data_cls = sirepo.sim_data.get_class(fc.sr_sim_type)
    r = fc.sr_post_form(
        'uploadFile',
        params=PKDict(
            simulation_type=fc.sr_sim_type,
            simulation_id=sim.models.simulation.simulationId,
            file_type='stl-file',
        ),
        data=PKDict(confirm='1'),
        file=sim_data_cls.lib_file_resource_dir().join('stl-file.lattice_gate.stl'),
    )
    pkeq('stl-file.lattice_gate.stl', r.filename)
    pkeq('stl-file', r.fileType)
    d2 = fc.sr_sim_data('new1')
    pkeq(sim.models.simulation.simulationId, d2.models.simulation.simulationId)
    # exercise listSimulations after the upload; result intentionally unchecked
    fc.sr_post('listSimulations', PKDict(simulationType=fc.sr_sim_type, ))
def test_srw_validate_file(fc):
    """Download then re-upload sample.tif and verify the upload response."""
    from pykern import pkunit
    from pykern.pkcollections import PKDict
    from pykern.pkdebug import pkdp
    from pykern.pkunit import pkre, pkeq
    import sirepo.sim_data

    sim = fc.sr_sim_data('Sample from Image')
    sim_data_cls = sirepo.sim_data.get_class(fc.sr_sim_type)
    resp = fc.sr_get(
        'downloadFile',
        params=PKDict(
            simulation_type=fc.sr_sim_type,
            simulation_id=sim.models.simulation.simulationId,
            filename='sample.tif',
        ),
        data=PKDict(),
        redirect=False,
    )
    # the download must be served as a tif
    pkre('/tif', resp.mimetype)
    lib_file = sim_data_cls.lib_file_resource_dir().join('sample.tif')
    resp = fc.sr_post_form(
        'uploadFile',
        params=PKDict(
            simulation_type=fc.sr_sim_type,
            simulation_id=sim.models.simulation.simulationId,
            file_type='sample',
        ),
        data=PKDict(confirm='1'),
        file=lib_file,
    )
    pkeq('sample.tif', resp.filename)
    pkeq('sample', resp.fileType)
    pkeq(sim.models.simulation.simulationId, resp.simulationId)
def test_case_insensitive():
    # tests case insensitive and attribute like variables
    from pykern.pkcollections import PKDict
    from pykern.pkunit import pkeq
    from sirepo.template.code_variable import CodeVar, PurePythonEval

    variables = [
        PKDict(name='x.x7.x', value='123'),
        PKDict(name='Y', value='x.X7.x + x.x7.x'),
    ]
    cv = CodeVar(variables, PurePythonEval(), case_insensitive=True)
    empty_models = PKDict(
        models=PKDict(
            beamlines=[],
            elements=[],
            commands=[],
        ),
    )
    # mixed-case references resolve to the lower-cased cache entries
    pkeq(
        cv.compute_cache(empty_models, PKDict()),
        PKDict({
            'x.x7.x': 123,
            'y': 246,
            'x.x7.x + x.x7.x': 246,
        }),
    )
    pkeq(
        cv.get_expr_dependencies('Y y +'),
        ['x.x7.x', 'y'],
    )
def test_to_environ(pkconfig_setup):
    """to_environ() filters, upper-cases, and serializes config values."""
    pkconfig = pkconfig_setup(
        cfg=dict(OTHER_THING='', P1_M1_SET4='a:b'),
        env=dict(P1_M1_REQ8='33'),
    )
    from pykern.pkcollections import PKDict

    assert PKDict(P1_M1_REQ8='33', P1_M1_SET4='a:b') == pkconfig.to_environ(['p1.*'])
    assert PKDict(P1_M1_REQ8='33', P1_M1_SET4='a:b', OTHER_THING='') \
        == pkconfig.to_environ(['p1.*', 'other.thing'])
    assert PKDict() == pkconfig.to_environ(['nomatch.*'])
    from pykern import pkunit

    # lists join with ':', bools become '1'/'', nested dicts flatten with '_'
    actual = pkconfig.to_environ(
        ['foo.*', 'baz.*.*'],
        values=dict({
            'foo_bar': ['a', 'c'],
            'foo.bar2': '2',
            'foo_BAR3': True,
            'FOO': {
                'BAR4': False,
                'BAR5': 5,
            },
            'baz.bar.exclude': 7,
            'baz.nomatch': 8,
            'baz.bar.foo': 9,
            'nomatch.foo': '4',
        }),
        exclude_re='exclude',
    )
    pkunit.pkeq(
        dict(FOO_BAR='a:c', FOO_BAR2='2', FOO_BAR3='1', FOO_BAR4='', FOO_BAR5='5', BAZ_BAR_FOO='9'),
        actual,
    )
def test_auth_login():
    """blueskyAuth with a hashed request returns the simulation (sr_unit variant)."""
    from pykern import pkcollections
    from pykern.pkunit import pkeq
    from sirepo import sr_unit

    fc = sr_unit.flask_client(cfg=dict(
        SIREPO_BLUESKY_AUTH_SECRET=
            '3SExmbOzn1WeoCWeJxekaE6bMDUj034Pu5az1hLNnvENyvL1FAJ1q3eowwODoa3f',
        ),
    )
    from sirepo import simulation_db
    from sirepo import bluesky

    fc.get('/srw')
    data = fc.sr_post(
        'listSimulations',
        {
            'simulationType': 'srw',
            'search': {
                'simulationName': 'Bending Magnet Radiation'
            }
        },
    )
    data = data[0].simulation
    req = pkcollections.Dict(
        simulationType='srw',
        simulationId=data.simulationId,
    )
    bluesky.auth_hash(req)
    resp = fc.sr_post('blueskyAuth', req)
    pkeq('ok', resp['state'])
    pkeq(req.simulationId, simulation_db.parse_sid(resp['data']))
    pkeq('srw', resp['schema']['simulationType'])
def test_read_all():
    """read_all() combines user and container manifests into one view."""
    from pykern import pkio
    from pykern import pkjson
    from pykern import pkunit
    from pykern.pkunit import pkok, pkeq, pkre
    from pykern.pkdebug import pkdp
    from pykern.pkcli import rsmanifest
    import re

    with pkunit.save_chdir_work(is_pkunit_prefix=True) as d:
        rsmanifest.add_code(
            'code1',
            version='1.1',
            uri='http://x.com',
            source_d='/tmp',
            pyenv='py2',
        )
        version = pkjson.load_any(pkio.py_path(rsmanifest.USER_FILE)).version
        # write a matching container manifest next to the user manifest
        container = {
            'version': version,
            'image': {
                'type': 'docker'
            }
        }
        pkjson.dump_pretty(container, filename=rsmanifest.CONTAINER_FILE)
        merged = rsmanifest.read_all()
        pkeq(version, merged.version)
        pkeq('docker', merged.image.type)
        pkeq('1.1', merged.codes.py2.code1.version)
def test_rename_folder(fc):
    """updateFolder renames only exact matches, not similarly-named folders."""
    from pykern.pkcollections import PKDict
    from pykern.pkdebug import pkdp
    from pykern.pkunit import pkeq
    import copy

    base = fc.sr_sim_data()
    base.pkupdate(
        name='new sim 1',
        folder='first folder',
    )
    created = fc.sr_post('newSimulation', base)
    pkeq('/' + base.folder, created.models.simulation.folder)
    other = copy.deepcopy(base)
    other.pkupdate(
        name='new sim 2',
        folder='first folder no-match',
    )
    created2 = fc.sr_post('newSimulation', other)
    new_name = 'new dir'
    fc.sr_post(
        'updateFolder',
        PKDict(
            newName=new_name,
            oldName=base.folder,
            simulationType=fc.sr_sim_type,
        ),
    )
    # the exact-match folder was renamed
    renamed = fc.sr_sim_data(base.name)
    pkeq('/' + new_name, renamed.models.simulation.folder)
    # the prefix-match folder was left alone
    untouched = fc.sr_sim_data('new sim 2')
    pkeq(created2.models.simulation.folder, untouched.models.simulation.folder)
def test_generate_python():
    """Generated srw python source for example sims matches stored files."""
    from pykern import pkio
    from pykern import pkunit
    from pykern.pkunit import pkeq
    from sirepo.template import srw
    from sirepo import sr_unit

    fc = sr_unit.flask_client()
    for name in ('NSLS-II CHX beamline', 'Sample from Image', 'Boron Fiber (CRL with 3 lenses)', 'Tabulated Undulator Example', 'Gaussian X-ray beam through a Beamline containing Imperfect Mirrors', 'NSLS-II SRX beamline', 'NSLS-II ESM beamline', 'Mask example', 'NSLS-II SMI beamline'):
        sim = fc.sr_sim_data(srw.SIM_TYPE, name)
        resp = fc.sr_get(
            'pythonSource',
            {
                'simulation_id': sim['models']['simulation']['simulationId'],
                'simulation_type': srw.SIM_TYPE,
            },
            raw_response=True,
        )
        filename = '{}.py'.format(name)
        # keep a copy in the work dir for debugging diffs
        with open(str(pkunit.work_dir().join(filename)), 'wb') as f:
            f.write(resp.data)
        expect = pkio.read_text(pkunit.data_dir().join(filename))
        pkeq(expect, resp.data)
def test_in_request(op, cfg=None, before_request=None, headers=None, want_cookie=True, **kwargs):
    """Run *op* within a simulated request; tolerate attribute cleanup failure.

    Returns:
        Response: the flask test-client response
    """
    fc = flask_client(cfg, **kwargs)
    try:
        if before_request:
            before_request(fc)
        # hang the operation on the app object for the srunit route
        setattr(
            server._app,
            server.SRUNIT_TEST_IN_REQUEST,
            pkcollections.Dict(op=op, want_cookie=want_cookie),
        )
        from sirepo import uri_router

        resp = fc.get(uri_router.srunit_uri, headers=headers)
        pkunit.pkeq(200, resp.status_code, 'FAIL: resp={}', resp.status)
    finally:
        # the attribute may never have been set if setup failed early
        try:
            delattr(server._app, server.SRUNIT_TEST_IN_REQUEST)
        except AttributeError:
            pass
    return resp
def test_in_request(op, cfg=None, before_request=None, headers=None, want_cookie=True, want_user=True, **kwargs):
    """Run *op* in a request and require an ok JSON (non-redirect) response.

    Returns:
        Response: the flask test-client response
    """
    fc = flask_client(cfg, **kwargs)
    try:
        from pykern import pkunit
        from pykern import pkcollections

        if before_request:
            before_request(fc)
        setattr(
            server._app,
            server.SRUNIT_TEST_IN_REQUEST,
            PKDict(op=op, want_cookie=want_cookie, want_user=want_user),
        )
        from sirepo import uri_router

        r = fc.get(uri_router.srunit_uri, headers=headers)
        pkunit.pkeq(200, r.status_code, 'FAIL: unexpected status={}', r.status)
        # an html body means the server redirected or errored instead of
        # returning the expected JSON payload
        if r.mimetype == 'text/html':
            m = _JAVASCRIPT_REDIRECT_RE.search(pkcompat.from_bytes(r.data))
            if m:
                pkunit.pkfail('unexpected redirect={}', m.group(1))
            pkunit.pkfail('unexpected html response={}', r.data)
        d = pkcollections.json_load_any(r.data)
        pkunit.pkeq('ok', d.get('state'), 'FAIL: expecting state=ok, but got data={}', d)
    finally:
        # cleanup is best-effort; setup may have failed before setattr
        try:
            delattr(server._app, server.SRUNIT_TEST_IN_REQUEST)
        except AttributeError:
            pass
    return r
def sr_auth_state(self, **kwargs):
    """Get authState and parse it, asserting each kwarg matches.

    Returns:
        dict: parsed auth_state
    """
    from pykern import pkunit
    import pykern.pkcollections

    # the auth state is embedded as a JSON object in the response body
    match = re.search(r'(\{.*\})', self.sr_get('authState').data)
    state = pykern.pkcollections.json_load_any(match.group(1))
    for k, v in kwargs.items():
        pkunit.pkeq(
            v,
            state[k],
            'key={} expected={} != actual={}: auth_state={}',
            k,
            v,
            state[k],
            state,
        )
    return state
def test_init_and_run(monkeypatch):
    """sim init/run round trip against the git-backed sim service (skipped without auth cfg)."""
    from pykern import pkio
    from pykern import pkunit
    from pykern.pkcli import sim
    from pykern.pkcli import rsmanifest
    import netrc
    import os
    import re
    import subprocess

    cfg = pkunit.cfg.aux.get('sim_test', None)
    if not cfg:
        # No testing if there's no auth config
        return
    u, p = cfg.split(' ')
    monkeypatch.setattr(netrc, 'netrc', _netrc)
    _netrc.result = (u, None, p)
    with pkunit.save_chdir_work(is_pkunit_prefix=True):
        f = 'out/log'
        expect_code = pkunit.random_alpha()
        pkio.write_text('run.sh', 'echo {}>{}'.format(expect_code, f))
        rsmanifest.pkunit_setup()
        sim._cmd_init()
        sim._cmd_run()
        # check_output returns bytes; decode so the str regex can match
        x = subprocess.check_output(['git', 'remote', '-v']).decode()
        m = re.search(r'/(sim-sim_work-\d+-\d+)\.git', x)
        # assert the match before dereferencing it
        pkunit.pkok(m, 'git remote: failed: {}', x)
        repo = m.group(1)
        pkunit.pkeq(expect_code, pkio.read_text('out/log').rstrip())
        os.remove('run.sh')
        sim._cmd_pip('djson')
        pkio.write_text('run.py', 'import djson')
        sim._cmd_run()
        sim._git_api_request(
            'delete',
            'repositories/{user}/{repo}',
            dict(repo=repo),
        )
def test_from_elegant_to_madx_and_back():
    """Convert example elegant lattices to madx and back, diffing both directions."""
    from pykern import pkio
    from pykern import pkunit
    from pykern.pkunit import pkeq
    from sirepo.template import elegant, madx, madx_converter, madx_parser

    with pkunit.save_chdir_work():
        for name in ('SPEAR3', 'Compact Storage Ring', 'Los Alamos Proton Storage Ring'):
            data = _example_data(name)
            # elegant -> madx
            mad = madx_converter.to_madx(elegant.SIM_TYPE, data)
            outfile = name.lower().replace(' ', '-') + '.madx'
            actual = madx.python_source_for_model(mad, None)
            pkio.write_text(outfile, actual)
            expect = pkio.read_text(pkunit.data_dir().join(outfile))
            pkeq(expect, actual)
            # madx -> elegant
            data = madx_parser.parse_file(actual)
            lattice = madx_converter.from_madx(elegant.SIM_TYPE, data)
            outfile = name.lower().replace(' ', '-') + '.lte'
            actual = elegant.python_source_for_model(lattice, None)
            pkio.write_text(outfile, actual)
            expect = pkio.read_text(pkunit.data_dir().join(outfile))
            pkeq(expect, actual)
def test_infix_to_postfix():
    """infix_to_postfix() converts arithmetic and unary negation to RPN."""
    from pykern.pkcollections import PKDict
    from pykern.pkunit import pkeq
    from sirepo.template.code_variable import CodeVar, PurePythonEval

    cv = CodeVar([], PurePythonEval(PKDict()))
    for infix, postfix in (
        ('x + y * 2', 'x y 2 * +'),
        # unary minus becomes the 'chs' (change sign) operator
        ('-(x)', 'x chs'),
        ('-(x + +x)', 'x x + chs'),
    ):
        pkeq(cv.infix_to_postfix(infix), postfix)
def test_purge_users(monkeypatch):
    """purge_users (sr_unit variant): only stale, unregistered dirs are purged."""
    from pykern.pkunit import pkeq, pkok
    from pykern import pkio
    from sirepo import sr_unit

    # Need to initialize first
    sr_unit.init_user_db()
    from sirepo.pkcli import admin
    from sirepo import simulation_db
    from sirepo import server

    #TODO(pjm): tried pkconfig.reset_state_for_testing() but couldn't override bool to False
    server.cfg.oauth_login = False
    res = admin.purge_users(days=1, confirm=False)
    pkeq([], res, '{}: no old users so empty')
    g = simulation_db.user_dir_name('*')
    dirs = list(pkio.sorted_glob(g))
    pkeq(1, len(dirs), '{}: expecting exactly one user dir', g)
    uid = dirs[0].basename
    #TODO(robnagler) really want the db to be created, but need
    # a test oauth class.
    monkeypatch.setattr(server, 'all_uids', lambda: [uid])
    for f in pkio.walk_tree(dirs[0]):
        # age every file past the one-day purge threshold
        f.setmtime(f.mtime() - 86400 * 2)
    res = admin.purge_users(days=1, confirm=False)
    pkeq([], res, '{}: all users registered so no deletes')
    monkeypatch.setattr(server, 'all_uids', lambda: [])
    res = admin.purge_users(days=1, confirm=False)
    pkeq(dirs, res, '{}: no users registered so one delete', res)
    pkok(dirs[0].check(dir=True), '{}: nothing deleted', res)
    res = admin.purge_users(days=1, confirm=True)
    pkeq(dirs, res, '{}: no users registered so one delete', res)
    pkok(not dirs[0].check(dir=True), '{}: directory deleted', res)
def test_cache():
    """compute_cache() resolves chained variables and element expressions."""
    from pykern.pkcollections import PKDict
    from pykern.pkunit import pkeq
    from sirepo.template.code_variable import CodeVar, PurePythonEval

    cv = CodeVar(
        [
            PKDict(name='x', value='123'),
            PKDict(name='y', value='x + x'),
            PKDict(name='z', value='y * -20'),
        ],
        PurePythonEval(),
    )
    data = PKDict(models=PKDict(
        beamlines=[],
        elements=[
            PKDict(
                _id=1,
                type='point',
                p1='x + y',
                p2=234,
            ),
        ],
        commands=[],
    ))
    schema = PKDict(model=PKDict(point=PKDict(
        p1=["P1", "RPNValue", 0],
        p2=["P2", "RPNValue", 0],
    )))
    # cache holds every variable, expression, and their evaluated values
    pkeq(
        cv.compute_cache(data, schema),
        PKDict({
            'x + x': 246,
            'x + y': 369,
            'x': 123,
            'y * -20': -4920,
            'y': 246,
            'z': -4920,
        }),
    )
    pkeq(cv.get_expr_dependencies('x x * x +'), ['x'])
    # transitive dependencies of y include x
    pkeq(cv.get_expr_dependencies('y 2 pow'), ['x', 'y'])
def test_energy_conversion():
    """compute_energy() derives all fields from pc and is self-consistent
    starting from any single field (except kinetic_energy)."""
    from pykern.pkcollections import PKDict
    from pykern.pkunit import pkeq
    from sirepo.template.template_common import ParticleEnergy

    energy = PKDict(pc=0.2997948399999999, )
    ParticleEnergy.compute_energy('madx', 'proton', energy)
    pkeq(
        energy,
        PKDict(
            beta=0.3043592432062238,
            brho=1.0000079454967472,
            energy=0.9850032377589688,
            gamma=1.0498055888568685,
            kinetic_energy=0.04673119175896878,
            pc=0.2997948399999999,
        ))
    for f in energy:
        if f == 'kinetic_energy':
            continue
        # starting from any other single field reproduces the full set
        pkeq(
            energy,
            ParticleEnergy.compute_energy('madx', 'proton', PKDict({
                f: energy[f],
            })))
def test_basic(auth_fc, monkeypatch):
    """serverStatus requires basic auth; with credentials it returns ok."""
    from pykern import pkcompat
    from pykern.pkcollections import PKDict
    from pykern.pkunit import pkeq
    import base64
    import sirepo.auth.basic

    u = auth_fc.sr_login_as_guest()
    sirepo.auth.basic.cfg.uid = u
    import sirepo.status

    auth_fc.cookie_jar.clear()
    # monkeypatch so status doesn't take so long
    sirepo.status._SIM_TYPE = 'myapp'
    sirepo.status._SIM_NAME = 'Scooby Doo'
    sirepo.status._SIM_REPORT = 'heightWeightReport'
    # no credentials: unauthorized
    pkeq(401, auth_fc.sr_get('serverStatus').status_code)
    r = auth_fc.sr_get_json(
        'serverStatus',
        headers=PKDict(Authorization='Basic ' + pkcompat.from_bytes(
            base64.b64encode(pkcompat.to_bytes(u + ':' + 'pass')),
            ),
        ),
    )
    pkeq('ok', r.state)
def test_generate_python():
    """Generated srw python source for example sims matches stored files."""
    from pykern import pkio
    from pykern import pkunit
    from pykern.pkunit import pkeq
    from sirepo.template import srw
    from sirepo import srunit

    fc = srunit.flask_client()
    fc.get('/{}'.format(srw.SIM_TYPE))
    for name in ('NSLS-II CHX beamline', 'Sample from Image', 'Boron Fiber (CRL with 3 lenses)', 'Tabulated Undulator Example', 'Gaussian X-ray beam through a Beamline containing Imperfect Mirrors', 'NSLS-II SRX beamline', 'NSLS-II ESM beamline', 'Mask example', 'NSLS-II SMI beamline'):
        sim = fc.sr_sim_data(srw.SIM_TYPE, name)
        resp = fc.sr_get(
            'pythonSource',
            {
                'simulation_id': sim['models']['simulation']['simulationId'],
                'simulation_type': srw.SIM_TYPE,
            },
            raw_response=True,
        )
        filename = '{}.py'.format(name)
        # keep a copy in the work dir for debugging diffs
        with open(str(pkunit.work_dir().join(filename)), 'wb') as f:
            f.write(resp.data)
        expect = pkio.read_text(pkunit.data_dir().join(filename))
        pkeq(expect, resp.data)
def test_myapp(auth_fc):
    """Guest then email registration flow; listSimulations is forbidden after."""
    from pykern import pkunit
    from pykern.pkdebug import pkdlog, pkdexc, pkdp

    fc = auth_fc
    # start as a guest and create sim data
    fc.sr_get('authGuestLogin', {'simulation_type': fc.sr_sim_type})
    fc.sr_sim_data()
    fc.sr_logout()
    # switch to the email login flow
    r = fc.sr_post('authEmailLogin', {
        'email': '[email protected]',
        'simulationType': fc.sr_sim_type
    })
    fc.sr_email_confirm(fc, r)
    fc.sr_post(
        'authCompleteRegistration',
        {
            'displayName': 'abc',
            'simulationType': fc.sr_sim_type,
        },
    )
    resp = fc.sr_post('listSimulations', {'simulationType': fc.sr_sim_type}, raw_response=True)
    pkunit.pkeq(403, resp.status_code)
def test_illegals(fc):
    """newSimulation sanitizes illegal name/folder characters or rejects blanks."""
    from pykern.pkcollections import PKDict
    from pykern.pkunit import pkeq, pkre

    d = fc.sr_sim_data()
    for x in (
        # (update, expected (name, folder)) or (update, expected error regex)
        (PKDict(name='new/sim'), ('newsim', '/folder')),
        (PKDict(name='some*sim'), ('somesim', '/folder')),
        (PKDict(folder='.foo'), ('name', '/foo')),
        (PKDict(name='s|&+?\'"im***\\'), ('sim', '/folder')),
        (PKDict(folder=''), 'blank folder'),
        (PKDict(name=''), 'blank name'),
        (PKDict(name='***'), 'blank name'),
    ):
        c = d.copy().pkupdate(folder='folder', name='name')
        r = fc.sr_post('newSimulation', c.pkupdate(x[0]))
        if 'error' in r:
            pkre(x[1], r.error)
        else:
            pkeq(r.models.simulation.name, x[1][0])
            pkeq(r.models.simulation.folder, x[1][1])
def test_beam_solver():
    """Ensure BeamSolver interface solves and produces output"""
    from pykern import pkio
    from pykern import pkunit
    from pykern.pkunit import pkeq
    from rslinac.solver import BeamSolver

    f = _files()
    with pkunit.save_chdir_work():
        pkio.write_text('Solenoid.txt', pkio.read_text(pkunit.data_dir().join('Solenoid.txt')))
        solver = BeamSolver(f['ini'], f['input'])
        solver.solve()
        solver.save_output(f['output'])
        assert f['output'].exists()
        v = solver.get_structure_parameters(1)
        assert v[2] == 0.0006
        # state must survive a dump/load round trip
        solver.dump_bin('all-data.bin')
        for outfile in ('PARSED.TXT', 'test1.pid'):
            expect = pkio.read_text(pkunit.data_dir().join(outfile))
            actual = pkio.read_text(pkunit.work_dir().join(outfile))
            pkeq(expect, actual)
        solver.load_bin('all-data.bin')
        v = solver.get_structure_parameters(1)
        assert v[2] == 0.0006
def _op():
    # Closure run inside a request context: verifies the cookie holds the
    # expected user and key/value pairs. `uid`, `cases`, and `pkeq` are free
    # variables from the enclosing test scope — presumably uid is the
    # logged-in user and cases is (expected_value_or_None, key) pairs.
    from sirepo import cookie
    pkeq(uid, cookie.get_user(checked=False))
    for expect, key in cases:
        if expect is None:
            # None means the key must be absent from the cookie
            pkeq(False, cookie.has_key(key))
        else:
            pkeq(expect, cookie.get_value(key))
def sr_sim_data(self, sim_name=None, sim_type=None):
    """Return simulation data by name

    Args:
        sim_name (str): case sensitive name ['Scooby Doo']
        sim_type (str): app ['myapp']
    Returns:
        dict: data
    """
    from pykern import pkunit

    self.sr_sim_type_set(sim_type)
    if not sim_name:
        sim_name = 'Scooby Doo'
    d = self.sr_post(
        'listSimulations',
        PKDict(
            simulationType=self.sr_sim_type,
            search=PKDict({'simulation.name': sim_name}),
        )
    )
    # the search must match exactly one simulation
    assert 1 == len(d), \
        'listSimulations name={} returned count={}'.format(sim_name, len(d))
    d = d[0].simulation
    res = self.sr_get_json(
        'simulationData',
        PKDict(
            simulation_type=self.sr_sim_type,
            pretty='0',
            simulation_id=d.simulationId,
        ),
    )
    pkunit.pkeq(sim_name, res.models.simulation.name)
    return res
def test_srw_cancel(fc):
    """Start a multi-electron srw run, cancel it, and verify no mpiexec survives."""
    from pykern import pkunit, pkcompat
    import subprocess
    import time

    d = fc.sr_sim_data("Young's Double Slit Experiment", sim_type='srw')
    r = fc.sr_post(
        'runSimulation',
        dict(
            forceRun=False,
            models=d.models,
            report='multiElectronAnimation',
            simulationId=d.models.simulation.simulationId,
            simulationType=d.simulationType,
        ),
    )
    for _ in range(10):
        # fixed: message was inverted and missing its {} argument
        pkunit.pkok(r.state != 'error', 'unexpected error state: {}', r)
        if r.state == 'running':
            break
        time.sleep(r.nextRequestSeconds)
        r = fc.sr_post('runStatus', r.nextRequest)
    else:
        pkunit.pkfail('runStatus: failed to start running: {}', r)
    x = r.nextRequest
    r = fc.sr_post('runCancel', x)
    pkunit.pkeq('canceled', r.state)
    r = fc.sr_post('runStatus', x)
    pkunit.pkeq('canceled', r.state)
    o = pkcompat.from_bytes(
        subprocess.check_output(['ps', 'axww'], stderr=subprocess.STDOUT),
    )
    o = list(filter(lambda x: 'mpiexec' in x, o.split('\n')))
    pkunit.pkok(
        not o,
        'found "mpiexec" after cancel in ps={}',
        '\n'.join(o),
    )
def test_importer(import_req):
    """Parse each opal *.in file and diff the generated python source."""
    from pykern import pkio
    from pykern import pkunit
    from pykern.pkdebug import pkdexc, pkdlog
    from pykern.pkunit import pkeq
    from sirepo.template import opal
    from sirepo.template import opal_parser
    import re

    with pkunit.save_chdir_work():
        for fn in pkio.sorted_glob(pkunit.data_dir().join('*.in')):
            error = None
            try:
                data, files = opal_parser.parse_file(pkio.read_text(fn), filename=fn)
            except Exception as e:
                pkdlog(pkdexc())
                error = str(e)
            if error:
                # a parse error is itself the expected output for bad inputs
                actual = error
            else:
                data['report'] = 'animation'
                actual = opal.python_source_for_model(data, None)
            outfile = re.sub(r'\.in$', '.txt', fn.basename)
            pkio.write_text(outfile, actual)
            expect = pkio.read_text(pkunit.data_dir().join(outfile))
            pkeq(expect, actual)
def test_from_elegant_to_madx_and_back():
    """Convert example elegant lattices to madx and back via ElegantMadxConverter."""
    from pykern import pkio
    from pykern import pkunit
    from pykern.pkunit import pkeq
    from sirepo.template import elegant
    from sirepo.template.elegant import ElegantMadxConverter

    with pkunit.save_chdir_work() as d:
        for name in ('SPEAR3', 'Compact Storage Ring', 'Los Alamos Proton Storage Ring'):
            data = _example_data(name)
            # elegant -> madx
            actual = ElegantMadxConverter().to_madx_text(data)
            outfile = name.lower().replace(' ', '-') + '.madx'
            pkio.write_text(outfile, actual)
            e = pkunit.data_dir().join(outfile)
            expect = pkio.read_text(e)
            pkeq(expect, actual, 'diff {} {}', e, d.join(outfile))
            # madx -> elegant
            lattice = ElegantMadxConverter().from_madx_text(actual)
            outfile = name.lower().replace(' ', '-') + '.lte'
            actual = elegant.python_source_for_model(lattice, None)
            pkio.write_text(outfile, actual)
            e = pkunit.data_dir().join(outfile)
            expect = pkio.read_text(e)
            pkeq(expect, actual, 'diff {} {}', e, d.join(outfile))
def test_1():
    """Cookie lifecycle: unauthorized access, set/save/clear, reload round trip."""
    from pykern import pkunit
    from pykern.pkunit import pkeq
    from sirepo import srunit

    srunit.flask_client()
    from sirepo import cookie

    cookie.init_mock()
    cookie.init('x')
    # no sentinel yet: any user access is unauthorized
    with pkunit.pkexcept('Unauthorized'):
        cookie.get_user()
    with pkunit.pkexcept('Unauthorized'):
        cookie.get_user(checked=False)
    cookie.set_sentinel()
    cookie.set_user('abc')
    cookie.set_value('hi', 'hello')
    r = _Response(status_code=200)
    cookie.save_to_cookie(r)
    pkeq('sirepo_dev', r.args[0])
    pkeq(False, r.kwargs['secure'])
    pkeq('abc', cookie.get_user())
    cookie.clear_user()
    cookie.unchecked_remove('hi')
    pkeq(None, cookie.get_user(checked=False))
    # reload the saved cookie and verify the state round-trips
    cookie.init('sirepo_dev={}'.format(r.args[1]))
    pkeq('hello', cookie.get_value('hi'))
    pkeq('abc', cookie.get_user())
def test_has_file_extension():
    """has_file_extension() is case-insensitive and accepts str or py.path
    inputs and a tuple of candidate extensions."""
    from pykern.pkunit import pkeq
    from pykern import pkio

    # extension match ignores case
    pkeq(True, pkio.has_file_extension('x.ABC', 'abc'))
    # py is assumed to be imported at module level — a path object works too
    pkeq(True, pkio.has_file_extension(py.path.local('x.abc'), ('abc', 'def')))