def test_simple(capsys):
    """Run pykern.pkcli.test against a copied tests/ tree and check output."""
    from pykern import pkunit
    import pykern.pkcli.test

    with pkunit.save_chdir_work() as work_d:
        tests_d = work_d.join('tests')
        pkunit.data_dir().join('tests').copy(tests_d)
        # both tests present: one passes, one fails
        with pkunit.pkexcept('FAILED=1 passed=1'):
            pykern.pkcli.test.default_command()
        out, err = capsys.readouterr()
        pkunit.pkre('1_test.py pass', out)
        pkunit.pkre('2_test.py FAIL', out)
        # hide the failing test; everything should pass
        tests_d.join('2_test.py').rename(tests_d.join('2_test.py-'))
        pkunit.pkre('passed=1', pykern.pkcli.test.default_command())
        out, err = capsys.readouterr()
        pkunit.pkre('1_test.py pass', out)
        # explicit single-file invocation
        pkunit.pkre('passed=1', pykern.pkcli.test.default_command('tests/1_test.py'))
        out, err = capsys.readouterr()
        pkunit.pkre('1_test.py pass', out)
        # restore the failing test and hide the passing one
        tests_d.join('2_test.py-').rename(tests_d.join('2_test.py'))
        tests_d.join('1_test.py').rename(tests_d.join('1_test.py-'))
        with pkunit.pkexcept('FAILED=1 passed=0'):
            pykern.pkcli.test.default_command()
        out, err = capsys.readouterr()
        pkunit.pkre('2_test.py FAIL', out)
        pkunit.pkre('x = 1 / 0', out)
def test_import():
    """Parse flash Config and .par data files and diff against expected output."""
    from pykern import pkjson
    from pykern.pkunit import pkeq
    from sirepo.template import flash_parser
    import re

    def _config(path):
        # Config files go straight through ConfigParser
        return flash_parser.ConfigParser().parse(pkio.read_text(path))

    def _par(path):
        # .par files need the matching -sirepo-data.json for context
        sim = path.basename.replace('-flash.par', '')
        return flash_parser.ParameterParser().parse(
            pkjson.load_any(
                pkio.read_text(
                    pkunit.data_dir().join(f'{sim}-sirepo-data.json'))),
            pkio.read_text(path),
        )

    with pkunit.save_chdir_work():
        for path in pkio.sorted_glob(pkunit.data_dir().join('*')):
            if re.search(r'-Config$', path.basename):
                parse = _config
            elif re.search(r'flash.par$', path.basename):
                parse = _par
            else:
                continue
            try:
                actual = pkjson.dump_pretty(parse(path))
            except Exception as e:
                pkdlog(pkdexc())
                actual = str(e)
            outfile = f'{path.basename}.out'
            pkio.write_text(outfile, actual)
            expect = pkio.read_text(pkunit.data_dir().join(outfile))
            pkeq(expect, actual)
def test_importer(import_req):
    """Import zgoubi .dat files and compare generated source to expected files.

    Bug fix: the exception handler read ``e.message``, which does not exist
    on Python 3 exceptions and would itself raise AttributeError; use
    ``str(e)`` instead.
    """
    from pykern import pkcollections
    from pykern import pkjson
    from pykern.pkunit import pkeq
    from sirepo.template import zgoubi
    import sirepo.sim_data

    with pkunit.save_chdir_work() as w:
        for fn in pkio.sorted_glob(pkunit.data_dir().join('*.dat')):
            error = None
            try:
                data = zgoubi.import_file(import_req(fn), unit_test_mode=True)
                sirepo.sim_data.get_class('zgoubi').fixup_old_data(data)
                #TODO(pjm): easier way to convert nested dict to pkcollections.Dict?
                data = pkcollections.json_load_any(pkjson.dump_pretty(data))
            except Exception as e:
                pkdlog(pkdexc())
                # Python 3 exceptions have no ``message`` attribute
                error = str(e)
            if error:
                actual = error
            else:
                actual = zgoubi.python_source_for_model(data)
            outfile = fn.basename + '.txt'
            pkio.write_text(outfile, actual)
            e = pkunit.data_dir().join(outfile)
            expect = pkio.read_text(e)
            pkeq(expect, actual, 'diff {} {}', e, w.join(outfile))
def test_from_elegant_to_madx_and_back():
    """Round-trip examples elegant -> madx -> elegant, diffing each stage."""
    from pykern import pkio
    from pykern.pkunit import pkeq
    from sirepo.template import elegant, madx, madx_converter, madx_parser

    with pkunit.save_chdir_work() as work_d:

        def _check(outfile, actual):
            # write the generated source and diff it against the data dir copy
            pkio.write_text(outfile, actual)
            expect_f = pkunit.data_dir().join(outfile)
            pkeq(pkio.read_text(expect_f), actual, 'diff {} {}', expect_f, work_d.join(outfile))

        for name in ('SPEAR3', 'Compact Storage Ring', 'Los Alamos Proton Storage Ring'):
            base = name.lower().replace(' ', '-')
            data = _example_data(name)
            # elegant -> madx
            mad = madx_parser.parse_file(
                elegant.python_source_for_model(data, 'madx'))
            madx._fixup_madx(mad)
            actual = madx.python_source_for_model(mad, None)
            _check(base + '.madx', actual)
            # madx -> elegant
            data = madx_parser.parse_file(actual)
            lattice = madx_converter.from_madx(elegant.SIM_TYPE, data)
            _check(base + '.lte', elegant.python_source_for_model(lattice, None))
def _code(files=None):
    """Run sirepo.lib.Importer over data dirs named after the calling test.

    The glob prefix is derived from the caller's function name, so the
    ``inspect.stack()[1]`` lookup must stay at this function's top level.
    """
    from pykern import pkunit, pkio, pkjson
    from pykern.pkdebug import pkdp
    import inspect
    import sirepo.lib

    prefix = inspect.stack()[1].function.split('_')[1]
    for i, case_d in enumerate(
        pkio.sorted_glob(pkunit.data_dir().join(f'{prefix}_*'))
    ):
        sim_type = case_d.basename.split('_')[0]
        parsed = sirepo.lib.Importer(sim_type).parse_file(
            pkio.sorted_glob(case_d.join('first*'))[0])
        # strip volatile/internal keys before comparing to out.json
        trimmed = parsed.copy()
        trimmed.pkdel('version')
        for k in [k for k in trimmed.keys() if '_SimData__' in k]:
            trimmed.pkdel(k)
        pkunit.file_eq(case_d.join('out.json'), trimmed)
        work_d = pkunit.work_dir().join(case_d.basename)
        written = parsed.write_files(work_d)
        for out in pkio.sorted_glob(pkunit.data_dir().join(case_d.basename, '*.out')):
            pkunit.file_eq(out, actual_path=work_d.join(out.basename).new(ext=''))
        if files:
            pkunit.pkok(
                set(files[i]).issubset(set(written.output_files)),
                'expecting files={} to be subset of output_files={}',
                files,
                written.output_files,
            )
def test_parse_madx_file():
    """Parse a madx lattice file and compare to its expected json form."""
    from pykern import pkio, pkjson
    from pykern.pkunit import pkeq
    from sirepo.template import madx, madx_parser

    for name in ('particle_track',):
        parsed = madx_parser.parse_file(
            pkio.read_text(pkunit.data_dir().join(f'{name}.madx')))
        madx._fixup_madx(parsed)
        # version varies per run; it is not part of the expected output
        del parsed['version']
        expect = pkjson.load_any(pkunit.data_dir().join(f'{name}.json'))
        pkeq(expect, parsed)
def test_parse_madx_file():
    """Parse madx lattice files, dump to json, and diff against data dir."""
    from pykern import pkio, pkjson
    from pykern.pkunit import pkeq
    from sirepo.template import madx, madx_parser

    with pkunit.save_chdir_work():
        for name in ('particle_track', 'alba'):
            parsed = madx_parser.parse_file(
                pkio.read_text(pkunit.data_dir().join(f'{name}.madx')))
            # version varies per run; drop before comparing
            del parsed['version']
            outfile = f'{name}.json'
            pkjson.dump_pretty(parsed, outfile)
            expect = pkjson.load_any(pkunit.data_dir().join(outfile))
            pkeq(expect, parsed)
def test_import_elegant_export_madx(import_req):
    """Import an elegant .ele/.lte pair and check the madx conversion."""
    from pykern.pkunit import pkeq, file_eq
    from sirepo.template import elegant
    from sirepo.template.elegant import ElegantMadxConverter

    data = elegant.import_file(import_req(pkunit.data_dir().join('test1.ele')))
    data = elegant.import_file(
        import_req(pkunit.data_dir().join('test1.lte')),
        test_data=data,
    )
    # this is updated from javascript unfortunately
    data.models.bunch.longitudinalMethod = '3'
    file_eq(
        'test1.madx',
        actual=ElegantMadxConverter().to_madx_text(data),
    )
def test_run_beam_solver():
    """Ensure pyhellweg.run_beam_solver produces output and does not crash"""
    from pykern import pkio
    from pykern.pkunit import pkeq
    from rslinac.pkcli import beam_solver

    files = _files()
    with pkunit.save_chdir_work():
        # solver expects Solenoid.txt in the working directory
        pkio.write_text(
            'Solenoid.txt',
            pkio.read_text(pkunit.data_dir().join('Solenoid.txt')),
        )
        beam_solver.run(files['ini'], files['input'], files['output'])
        assert files['output'].exists()
        for outfile in ('PARSED.TXT', 'test1.pid'):
            pkeq(
                pkio.read_text(pkunit.data_dir().join(outfile)),
                pkio.read_text(pkunit.work_dir().join(outfile)),
            )
def _do(fc, file_ext, parse):
    """Import each data-dir file of the given extension and verify the name.

    Non-py types are imported three times to exercise name deduplication.
    """
    from pykern.pkcollections import PKDict
    from pykern import pkio, pkcompat
    from pykern import pkunit
    from pykern import pkcollections
    from pykern.pkdebug import pkdp, pkdlog
    from pykern.pkunit import pkeq, pkfail, pkok, pkre
    import re

    suffixes = ('',) if file_ext == 'py' else ('', ' 2', ' 3')
    for suffix in suffixes:
        for path in pkio.sorted_glob(pkunit.data_dir().join('*.' + file_ext)):
            pkdlog('file={}', path)
            json = pkcompat.from_bytes(parse(path))
            sim_type = re.search(r'^([a-z]+)_', path.basename).group(1)
            fc.sr_get_root(sim_type)
            is_dev = 'deviance' in path.basename
            res = fc.sr_post_form(
                'importFile',
                PKDict(folder='/importer_test'),
                PKDict(simulation_type=sim_type),
                file=path,
            )
            if is_dev:
                # deviant cases assert the expected error message, if any
                m = re.search(r'Error: (.+)', json)
                if m:
                    pkre(m.group(1), res.error)
                    continue
            elif file_ext == 'py':
                sim_name = path.purebasename
            else:
                sim_name = pkcollections.json_load_any(json).models.simulation.name
            assert 'models' in res, \
                f'file={path} res={res}'
            pkeq(sim_name + suffix, res.models.simulation.name)
def test_checked_call():
    """Run p1.py with various args (some under mpiexec) and check exit codes."""
    from pykern import pkunit
    from pykern.pkunit import pkeq
    import sys
    import subprocess

    with pkunit.save_chdir_work():
        base_cmd = [sys.executable, str(pkunit.data_dir().join('p1.py'))]
        cases = (
            ('normal', 0),
            ('exit-1', 1),
            ('divide-zero', 1),
            ('normal-rank-all', 0),
            ('divide-zero-rank-2', 86),
            ('exit-13-rank-0', 13),
        )
        for i, (arg, expect_code) in enumerate(cases):
            out_name = '{}.out'.format(i)
            with open(out_name, 'w') as out:
                cmd = base_cmd + [arg]
                print(arg)
                if 'rank' in arg:
                    # mpi cases run under mpiexec with 4 ranks
                    cmd = ['mpiexec', '-n', '4'] + cmd
                rc = subprocess.call(
                    cmd,
                    stdout=out,
                    stderr=subprocess.STDOUT,
                )
                pkeq(expect_code, rc, '{}: exit({})\n{}', ' '.join(cmd), rc, open(out_name).read())
def test_validate_safe_zip():
    """Reject malformed magnet-measurement zips; accept the shipped ones."""
    from sirepo.template import srw

    zip_dir = str(pkunit.data_dir() + '/zip_dir')
    # Each of these violates an index rule: no index file, incomplete index,
    # index entries missing from the zip, unacceptable file types.
    for bad in (
        'bad_zip_no_index.zip',
        'bad_zip_incomplete_index.zip',
        'bad_zip_extra_index.zip',
        'bad_zip_bad_types.zip',
    ):
        with pkunit.pkexcept(AssertionError):
            srw._validate_safe_zip(zip_dir + '/' + bad, zip_dir, srw.validate_magnet_data_file)
    # Finally, make sure the included measurement files are OK
    # We're not really extracting them so just send the test directory as target
    for f in [
        'magn_meas_chx.zip', 'magn_meas_esm.zip', 'magn_meas_fmx.zip',
        'magn_meas_srx.zip', 'magn_meas_u20_hxn.zip', 'magn_meas_chx.zip'
    ]:
        srw._validate_safe_zip(pkresource.filename('template/srw/' + f, srw), zip_dir, srw.validate_magnet_data_file)
def sr_post_form(self, route_or_uri, data, params=None, raw_response=False, file=None, **kwargs):
    """Posts form data to route_or_uri to server with data

    Args:
        route_or_uri (str): identifies route in schema-common.json
        data (dict): will be formatted as JSON
        params (dict): optional params to route_or_uri
        file (object): if str, will look in data_dir, else assumed py.path
    Returns:
        object: Parsed JSON result
    """
    from pykern import pkunit, pkconfig

    if file:
        path = file
        if isinstance(path, pkconfig.STRING_TYPES):
            # bare names are resolved against the test's data dir
            path = pkunit.data_dir().join(path)
        data.file = (open(str(path), 'rb'), path.basename)
    return self.__req(
        route_or_uri,
        params,
        PKDict(),
        lambda r: self.post(r, data=data),
        raw_response=raw_response,
        **kwargs
    )
def test_elegant_upload_sdds(fc):
    """Upload a bunch file and verify it lands in the user's elegant lib."""
    from pykern import pkio
    from pykern import pkunit
    from pykern.pkcollections import PKDict
    from pykern.pkdebug import pkdp
    import sirepo.sim_data

    sim = fc.sr_sim_data('Compact Storage Ring')
    fc.sr_post_form(
        'uploadFile',
        params=PKDict(
            simulation_type=fc.sr_sim_type,
            simulation_id=sim.models.simulation.simulationId,
            file_type='bunchFile-sourceFile',
        ),
        data=PKDict(),
        # somename.bun was created with:
        # d.models.bunch.n_particles_per_bunch = 50
        file=pkunit.data_dir().join('somename.bun'),
    )
    import sirepo.srdb
    #TODO(robnagler) make easier to get at this in tests
    uploaded = pkio.sorted_glob(
        sirepo.srdb.root().join('user', fc.sr_uid, 'elegant', 'lib', '*'))
    pkunit.pkeq(1, len(uploaded))
    pkunit.pkeq('bunchFile-sourceFile.somename.bun', uploaded[0].basename)
def test_uniquify_beamline():
    """Verify uniquify_elements yields unique items mapping back correctly."""
    from pykern import pkio
    from pykern import pkunit
    from pykern.pkunit import pkeq
    from pykern import pkjson
    from sirepo.template import madx

    data = pkjson.load_any(pkunit.data_dir().join('in.json'))
    madx.uniquify_elements(data)
    pkeq(1, len(data.models.beamlines), 'expecting one beamline={}', data.models.beamlines)
    items = data.models.beamlines[0]['items']
    # every item id must be distinct after uniquification
    pkeq(len(set(items)), len(items), 'expecting all unique items={}', items)
    original_of = {el._id: el.original_id for el in data.models.elements}
    reflected = [original_of[i] for i in data.models.beamlines[0]['items']]
    pkeq(
        [2, 2, 5, 5, 5, 2, 2, 5, 5, 5, 2],
        reflected,
        'expecting proper reflection of sub-lines. ids of original elements: {}',
        reflected)
def test_data_dir():
    """data_dir() returns the expected py.path.local for this test."""
    expect = _expect('pkunit_data')
    actual = pkunit.data_dir()
    assert isinstance(actual, PY_PATH_LOCAL_TYPE), \
        'Verify type of data_dir is same as returned by py.path.local'
    assert actual == expect, \
        'Verify data_dir has correct return value'
def test_validate_safe_zip():
    """Reject zips violating index rules; accept the shipped measurement zips."""
    from sirepo.template.template_common import validate_safe_zip
    from sirepo.template import srw
    from sirepo.template.srw import validate_magnet_data_file

    zip_dir = str(pkunit.data_dir() + '/zip_dir')
    # Rejected cases: no index file, incomplete index, index entries not in
    # the zip, and unacceptable file types.
    for bad in (
        'bad_zip_no_index.zip',
        'bad_zip_incomplete_index.zip',
        'bad_zip_extra_index.zip',
        'bad_zip_bad_types.zip',
    ):
        with pkunit.pkexcept(AssertionError):
            validate_safe_zip(zip_dir + '/' + bad, zip_dir, validate_magnet_data_file)
    # Finally, make sure the included measurement files are OK
    # We're not really extracting them so just send the test directory as target
    for good in (
        'magn_meas_chx.zip',
        'magn_meas_esm.zip',
        'magn_meas_fmx.zip',
        'magn_meas_srx.zip',
        'magn_meas_u20_hxn.zip',
        'magnetic_measurements.zip',
    ):
        validate_safe_zip(pkresource.filename('template/srw/' + good, srw), zip_dir, validate_magnet_data_file)
def test_filename():
    """pkresource.filename resolves names relative to the caller's package."""
    import importlib
    import os.path
    from pykern import pkunit
    from pykern import pkresource

    data_d = pkunit.data_dir()
    t1 = importlib.import_module(data_d.basename + '.t1')
    assert t1.somefile().startswith('anything'), \
        'When somefile is called, it should return the "anything" file'
    # peel path components off the resolved resource name, right to left
    parts = [pkresource.filename('test.yml', pkresource)]

    def _tail():
        (parts[0], tail) = os.path.split(parts[0])
        return tail

    assert 'test.yml' == _tail(), \
        'nth of resource name is name passed to pkresource'
    assert 'package_data' == _tail(), \
        'n-1th resource is always "package_data"'
    assert 'pykern' == _tail(), \
        'n-2th resource is root package of passed in context'
    with pytest.raises(IOError):
        # Should not find somefile, because that's in a different context
        pkresource.filename('somefile', pkresource)
    assert pkresource.filename('somefile', t1.somefile), \
        'Given any object, should fine resource in root package of that object'
def test_importer(import_req):
    """Import elegant files three ways and diff each against a .txt fixture."""
    from pykern.pkcollections import PKDict
    from sirepo.template import elegant
    import sirepo.lib

    for path in pkio.sorted_glob(pkunit.data_dir().join('*')):
        if not pkio.has_file_extension(path, ('ele', 'lte')) \
            or path.basename.endswith('.ele.lte'):
            continue
        kwargs = PKDict()
        pkdlog('file={}', path)
        if path.basename.startswith('deviance-'):
            # deviant inputs are expected to raise; record what happened
            try:
                elegant.import_file(import_req(path))
            except Exception as e:
                kwargs.actual = f'{e}\n'
            else:
                kwargs.actual = 'did not raise exception'
        elif path.ext == '.lte':
            data = elegant.import_file(import_req(path))
            data['models']['commands'] = []
            gen = elegant._Generate(data)
            gen.sim()
            env = gen.jinja_env
            kwargs.actual = env.rpn_variables + env.lattice
        else:
            written = sirepo.lib.Importer('elegant').parse_file(path).write_files(pkunit.work_dir())
            kwargs.actual_path = written.commands
        pkunit.file_eq(path.basename + '.txt', **kwargs)
def test_validate_safe_zip():
    """Reject unsafe-to-extract zips; accept a known-good one."""
    from sirepo.template.template_common import validate_safe_zip
    from sirepo.template.srw import validate_magnet_data_file

    zip_dir = str(pkunit.data_dir() + '/zip_dir')
    # Rejected cases: would overwrite files, would escape the target
    # directory, contains an unacceptably large file, or has executable
    # permission bits set.
    for bad in (
        'bad_zip_would_overwrite.zip',
        'bad_zip_external_file.zip',
        'bad_zip_large_file.zip',
        'bad_zip_executables.zip',
    ):
        with pkunit.pkexcept(AssertionError):
            validate_safe_zip(zip_dir + '/' + bad, zip_dir)
    # Finally, accept a zip file known to be safe
    validate_safe_zip(zip_dir + '/good_zip.zip', zip_dir, validate_magnet_data_file)
def test_load_file():
    """Test values are unicode"""
    from pykern import pkunit
    from pykern import pkyaml

    _assert_unicode(pkyaml.load_file(pkunit.data_dir().join('conf1.yml')))
def _do(file_ext, parse):
    """Import each data file three times and verify names get (n) suffixes."""
    from pykern import pkio
    from pykern import pkunit
    from pykern import pkcollections
    from pykern.pkdebug import pkdp
    from pykern.pkunit import pkeq, pkfail, pkok
    from sirepo import srunit
    import re

    fc = srunit.flask_client()
    for suffix in '', ' (2)', ' (3)':
        for path in pkio.sorted_glob(pkunit.data_dir().join('*.' + file_ext)):
            json, stream = parse(path)
            sim_type = re.search(r'^([a-z]+)_', path.basename).group(1)
            fc.get('/{}'.format(sim_type))
            is_dev = 'deviance' in path.basename
            if not is_dev:
                sim_name = pkcollections.json_load_any(json).models.simulation.name
            res = fc.sr_post_form(
                'importFile',
                {
                    'file': (stream, path.basename),
                    'folder': '/importer_test',
                },
                {'simulation_type': sim_type},
            )
            if is_dev:
                # deviant cases only assert the error when one is declared
                m = re.search(r'Error: (.+)', json)
                if m:
                    pkeq(m.group(1), res.error)
                    continue
            pkeq(sim_name + suffix, res.models.simulation.name)
def _do(file_ext, parse):
    """Import each data file three times (srw:myapp client) and check names."""
    from pykern import pkio
    from pykern import pkunit
    from pykern import pkcollections
    from pykern.pkdebug import pkdp
    from pykern.pkunit import pkeq, pkfail, pkok
    from sirepo import srunit
    import re

    fc = srunit.flask_client(sim_types='srw:myapp')
    fc.sr_login_as_guest()
    for suffix in '', ' 2', ' 3':
        for path in pkio.sorted_glob(pkunit.data_dir().join('*.' + file_ext)):
            json, stream = parse(path)
            sim_type = re.search(r'^([a-z]+)_', path.basename).group(1)
            fc.sr_get_root(sim_type)
            is_dev = 'deviance' in path.basename
            if not is_dev:
                sim_name = pkcollections.json_load_any(json).models.simulation.name
            res = fc.sr_post_form(
                'importFile',
                {
                    'file': (stream, path.basename),
                    'folder': '/importer_test',
                },
                {'simulation_type': sim_type},
            )
            if is_dev:
                # deviant cases only assert the error when one is declared
                m = re.search(r'Error: (.+)', json)
                if m:
                    pkeq(m.group(1), res.error)
                    continue
            pkeq(sim_name + suffix, res.models.simulation.name)
def _before_request(fc):
    """Seed the beaker session store with the canned container file."""
    # beaker nests container files two levels deep by filename prefix
    dst = srunit.server.app.sirepo_db_dir.join(
        'beaker', 'container_file', filename[0:1], filename[0:2], filename)
    pkio.mkdir_parent_only(dst)
    shutil.copy(str(pkunit.data_dir().join(filename)), str(dst))
def test_oauth_conversion(monkeypatch):
    """See `x_test_oauth_conversion_setup`"""
    fc, sim_type = _fc()
    from pykern import pkcollections
    from pykern.pkdebug import pkdp
    from pykern.pkunit import pkok, pkre, pkeq
    from pykern import pkunit
    from pykern import pkio
    from sirepo.auth import github
    from sirepo import github_srunit
    from sirepo import server
    import re
    import shutil

    # start from the canned db in the data dir
    pkio.unchecked_remove(server._app.sirepo_db_dir)
    pkunit.data_dir().join('db').copy(server._app.sirepo_db_dir)
    # install a pre-captured session cookie so the user looks like a
    # legacy oauth user
    fc.cookie_jar.clear()
    fc.set_cookie(
        'localhost',
        'sirepo_dev',
        'Z0FBQUFBQmN2bGQzaGc1MmpCRkxIOWNpWi1yd1JReXUxZG5FV2VqMjFwU2w2cmdOSXhlaWVkOC1VUzVkLVR5NzdiS080R3p1aGUwUEFfdmpmdDcxTmJlOUR2eXpJY2l1YUVWaUVVa3dCYXpnZGIwTV9fei1iTWNCdkp0eXJVY0Ffenc2SVoxSUlLYVM=',
    )
    oc = github_srunit.MockOAuthClient(monkeypatch, 'emailer')
    uid = fc.sr_auth_state(isLoggedIn=False, method='github').uid
    # not logged in yet: server should demand a github login
    r = fc.sr_post('listSimulations', {'simulationType': sim_type})
    pkeq('loginWith', r.srException.routeName)
    pkeq('github', r.srException.params.method)
    # drive the mocked oauth redirect flow
    r = fc.sr_get('authGithubLogin', {'simulation_type': sim_type})
    state = oc.values.state
    pkeq(302, r.status_code)
    pkre(state, r.headers['location'])
    fc.sr_get('authGithubAuthorized', query={'state': state})
    # converting to email login while the github identity is active
    r = fc.sr_post(
        'authEmailLogin',
        {
            'email': '*****@*****.**',
            'simulationType': sim_type
        },
    )
    fc.sr_auth_state(isLoggedIn=True, method='github', uid=uid)
    # following the email-confirmation url completes the conversion;
    # same uid, but the auth method is now email
    fc.get(r.url)
    fc.sr_auth_state(
        isLoggedIn=True,
        method='email',
        uid=uid,
        userName='******',
    )
def test_importer():
    """Import elegant .ele/.lte files and diff generated output with fixtures.

    Bug fix: the exception handler read ``e.message``, which does not exist
    on Python 3 exceptions and would itself raise AttributeError; use
    ``str(e)`` instead.
    """
    from pykern import pkcollections
    from pykern import pkio
    from pykern.pkunit import pkeq
    from sirepo.template import elegant

    with pkunit.save_chdir_work():
        for fn in pkio.sorted_glob(pkunit.data_dir().join('*')):
            if not pkio.has_file_extension(fn, ('ele', 'lte')) \
                or fn.basename.endswith('ele.lte'):
                continue
            error = None
            try:
                data = elegant.import_file(FlaskRequest(fn))
            except Exception as e:
                pkdlog(pkdexc())
                # Python 3 exceptions have no ``message`` attribute
                error = str(e)
            if error:
                actual = error
            else:
                if pkio.has_file_extension(fn, 'lte'):
                    data['models']['commands'] = []
                    actual = '{}{}'.format(
                        elegant._generate_variables(data),
                        elegant.generate_lattice(
                            data,
                            elegant._build_filename_map(data),
                            elegant._build_beamline_map(data),
                            pkcollections.Dict(),
                        ),
                    )
                else:
                    data2 = elegant.import_file(
                        FlaskRequest('{}.lte'.format(fn)), test_data=data)
                    actual = elegant._generate_commands(
                        data2,
                        elegant._build_filename_map(data2),
                        elegant._build_beamline_map(data2),
                        pkcollections.Dict(),
                    )
            outfile = fn.basename + '.txt'
            pkio.write_text(outfile, actual)
            expect = pkio.read_text(pkunit.data_dir().join(outfile))
            #TODO(pjm): this takes too long if there are a lot of diffs
            #assert expect == actual
            pkeq(expect, actual)
def _parse_par(fn):
    """Parse a flash .par file using its companion -sirepo-data.json."""
    # the sim name is the .par filename minus the '-flash.par' suffix
    sim_name = fn.basename.replace('-flash.par', '')
    return flash_parser.ParameterParser().parse(
        pkjson.load_any(
            pkio.read_text(
                pkunit.data_dir().join(f'{sim_name}-sirepo-data.json'))),
        pkio.read_text(fn),
    )
def test_run_path_as_module():
    """run_path_as_module returns a module whose functions are callable."""
    import sys
    from pykern import pkunit
    from pykern import pkrunpy

    mod = pkrunpy.run_path_as_module(pkunit.data_dir().join('f1.py'))
    assert mod.func1() == sys.modules, \
        'When imported, should be able to call function within module'
def test_py_path():
    """pkio.py_path round-trips a py.path.local unchanged."""
    from pykern import pkunit
    from pykern import pkio
    from pykern.pkunit import pkeq

    with pkunit.save_chdir_work():
        data_d = pkunit.data_dir()
        pkeq(data_d, pkio.py_path(data_d))
def test_importer():
    """Import elegant .ele/.lte files and diff generated output with fixtures.

    Bug fix: the exception handler read ``e.message``, which does not exist
    on Python 3 exceptions and would itself raise AttributeError; use
    ``str(e)`` instead.
    """
    from pykern import pkcollections
    from pykern import pkio
    from pykern.pkunit import pkeq
    from sirepo.template import elegant

    with pkunit.save_chdir_work():
        for fn in pkio.sorted_glob(pkunit.data_dir().join('*')):
            if not pkio.has_file_extension(fn, ('ele', 'lte')) \
                or fn.basename.endswith('ele.lte'):
                continue
            error = None
            try:
                data = elegant.import_file(FlaskRequest(fn))
            except Exception as e:
                pkdlog(pkdexc())
                # Python 3 exceptions have no ``message`` attribute
                error = str(e)
            if error:
                actual = error
            else:
                if pkio.has_file_extension(fn, 'lte'):
                    data['models']['commands'] = []
                    actual = '{}{}'.format(
                        elegant._generate_variables(data),
                        elegant.generate_lattice(
                            data,
                            elegant._build_filename_map(data),
                            elegant._build_beamline_map(data),
                            pkcollections.Dict(),
                        ),
                    )
                else:
                    data2 = elegant.import_file(FlaskRequest('{}.lte'.format(fn)), test_data=data)
                    actual = elegant._generate_commands(
                        data2,
                        elegant._build_filename_map(data2),
                        elegant._build_beamline_map(data2),
                        pkcollections.Dict(),
                    )
            outfile = fn.basename + '.txt'
            pkio.write_text(outfile, actual)
            expect = pkio.read_text(pkunit.data_dir().join(outfile))
            #TODO(pjm): this takes too long if there are a lot of diffs
            #assert expect == actual
            pkeq(expect, actual)
def test_is_caller_main():
    """is_caller_main is False via import, True when run from __main__."""
    mod = pkunit.import_module_from_data_dir('p1.m1')
    assert not mod.is_caller_main(), \
        'When not called from main, is_caller_main is False'
    with pkio.save_chdir(pkunit.data_dir()):
        # the subprocess imports m1 from a -c "main", so it must be True there
        subprocess.check_call([
            sys.executable,
            '-c',
            'from p1 import m1; assert m1.is_caller_main()'
        ])
def test_delete_user():
    """delete_user removes all traces of the user from db and filesystem."""
    from pykern import pkio
    from pykern import pkunit
    from sirepo import auth_db
    from sirepo import simulation_db
    from sirepo import srunit
    from sirepo.sim_api import jupyterhublogin
    import sirepo.pkcli.admin
    import sirepo.srdb

    # start from the canned db shipped in the data dir
    pkio.unchecked_remove(sirepo.srdb.root())
    pkunit.data_dir().join('db').copy(sirepo.srdb.root())
    sirepo.pkcli.admin.delete_user('IYgnLlSy')
    with auth_db.session_and_lock():
        _is_empty(jupyterhublogin.JupyterhubUser.search_all_by())
        _is_empty(auth_db.UserRegistration.search_all_by())
        _is_empty(pkio.sorted_glob(jupyterhublogin.cfg.user_db_root_d.join('*')))
        _is_empty(pkio.sorted_glob(simulation_db.user_path().join('*')))
def test_is_caller_main():
    """is_caller_main is False via import, True when run from __main__."""
    mod = pkunit.import_module_from_data_dir('p1.m1')
    assert not mod.is_caller_main(), \
        'When not called from main, is_caller_main is False'
    with pkio.save_chdir(pkunit.data_dir()):
        # run from a -c "main" so is_caller_main must be True in the child
        subprocess.check_call([
            sys.executable,
            '-c',
            'from p1 import m1; assert m1.is_caller_main()'])
def _main(root_pkg, argv):
    """Invoke pkcli.main with argv, with the data dir importable."""
    sys.argv[:] = ['pkcli_test']
    sys.argv.extend(argv)
    data_d = str(pkunit.data_dir())
    try:
        # make the data dir importable only for the duration of the call
        sys.path.insert(0, data_d)
        return pkcli.main(root_pkg)
    finally:
        if sys.path[0] == data_d:
            sys.path.pop(0)
def _before_request(fc):
    """Seed the beaker session store and install the matching cookies."""
    # beaker nests container files two levels deep by filename prefix
    dst = srunit.server._app.sirepo_db_dir.join(
        'beaker', 'container_file', filename[0:1], filename[0:2], filename)
    pkio.mkdir_parent_only(dst)
    shutil.copy(str(pkunit.data_dir().join(filename)), str(dst))
    for h in header.split('; '):
        fc.set_cookie('', *h.split('='))
def _project_dir(project):
    """Copy "data_dir/project" to "work_dir/project"

    Initializes as a git repo.

    Args:
        project (str): subdirectory name

    Returns:
        py.path.local: working directory"""
    repo = pkunit.empty_work_dir().join(project)
    pkunit.data_dir().join(repo.basename).copy(repo)
    with pkio.save_chdir(repo):
        for args in (
            ['init', '.'],
            ['config', 'user.email', '*****@*****.**'],
            ['config', 'user.name', 'pykern'],
            ['add', '.'],
            # Need a commit
            ['commit', '-m', 'n/a'],
        ):
            check_call(['git'] + args)
        yield repo
def test_import_module_from_data_dir(monkeypatch):
    """Modules of the same name load from whichever data_dir is active."""
    real_dir = pkunit.data_dir()
    fake_dir = None

    def _mock_data_dir():
        # closes over fake_dir so each reassignment below takes effect
        return fake_dir

    monkeypatch.setattr(pkunit, 'data_dir', _mock_data_dir)
    fake_dir = str(real_dir.join('import1'))
    assert 'imp1' == pkunit.import_module_from_data_dir('p1').v, \
        'import1/p1 should be from "imp1"'
    fake_dir = str(real_dir.join('import2'))
    assert 'imp2' == pkunit.import_module_from_data_dir('p1').v, \
        'import2/p1 should be from "imp2"'
def test_sirepo_parser():
    """Import a known SRWLIB script and compare the parsed object to json."""
    with pkunit.save_chdir_work():
        for base in ['SRWLIB_VirtBL_LCLS_SXR_01']:
            base_py = '{}.py'.format(base)
            code = pkio.read_text(pkunit.data_dir().join(base_py))
            error, actual = import_python(
                code,
                tmp_dir='.',
                lib_dir='.',
                user_filename=r'c:\x\{}.y'.format('SRWLIB_VirtBL_LCLS_SXR_01'),
            )
            assert not error, \
                '{}: should be valid input'.format(base_py)
            pkunit.assert_object_with_json(base, actual)
def test_generate_python():
    """Generate shadow parameter files for examples and diff with fixtures."""
    from pykern import pkio
    from pykern.pkunit import pkeq
    from sirepo.template import shadow

    with pkunit.save_chdir_work():
        for name in ('Complete Beamline', 'Wiggler'):
            data = _example_data(name)
            # target the watchpoint report of the last beamline element
            data['report'] = 'watchpointReport{}'.format(data.models.beamline[-1].id)
            actual = shadow._generate_parameters_file(data)
            outfile = data.models.simulation.simulationId + '.txt'
            pkio.write_text(outfile, actual)
            pkeq(pkio.read_text(pkunit.data_dir().join(outfile)), actual)
def test_importer():
    """Run srw_importer over every canned script and compare parsed output."""
    from sirepo.template.srw_importer import import_python
    from pykern import pkio
    from pykern import pkresource
    from pykern import pkunit
    from pykern.pkdebug import pkdc, pkdp
    import glob
    import py

    _TESTS = {  # Values are optional arguments:
        'amx': ('amx', None),
        'amx_bl2': ('amx', '--op_BL=2'),
        'amx_bl3': ('amx', '--op_BL=3'),
        'amx_bl4': ('amx', '--op_BL=4'),
        'chx': ('chx', None),
        'chx_fiber': ('chx_fiber', None),
        'exported_chx': ('exported_chx', None),
        'exported_gaussian_beam': ('exported_gaussian_beam', None),
        'exported_undulator_radiation': ('exported_undulator_radiation', None),
        'lcls_simplified': ('lcls_simplified', None),
        'lcls_sxr': ('lcls_sxr', None),
        'sample_from_image': ('sample_from_image', None),
        'smi_es1_bump_norm': ('smi', '--beamline ES1 --bump --BMmode Norm'),
        'smi_es1_nobump': ('smi', '--beamline ES1'),
        'smi_es2_bump_lowdiv': ('smi', '--beamline ES2 --bump --BMmode LowDiv'),
        'smi_es2_bump_norm': ('smi', '--beamline ES2 --bump --BMmode Norm'),
        'srx': ('srx', None),
        'srx_bl2': ('srx', '--op_BL=2'),
        'srx_bl3': ('srx', '--op_BL=3'),
        'srx_bl4': ('srx', '--op_BL=4'),
    }
    dat_dir = py.path.local(pkresource.filename('template/srw/', import_python))
    with pkunit.save_chdir_work():
        work_dir = py.path.local('.')
        # seed the work dir with mirror data files and the sample image
        for dat in glob.glob(str(dat_dir.join('mirror_*d.dat'))):
            py.path.local(dat).copy(work_dir)
        py.path.local(str(dat_dir.join('sample.tif'))).copy(work_dir)
        for case in sorted(_TESTS.keys()):
            src, args = _TESTS[case]
            base_py = '{}.py'.format(src)
            code = pkio.read_text(pkunit.data_dir().join(base_py))
            actual = import_python(
                code,
                tmp_dir=str(work_dir),
                lib_dir=str(work_dir),
                user_filename=r'c:\anything\{}.anysuffix'.format(src),
                arguments=args,
            )
            # version changes per run; neutralize before comparing
            actual['version'] = 'IGNORE-VALUE'
            pkunit.assert_object_with_json(case, actual)
def _import(fc):
    """Import each data-dir zip; return (type, name, expected names) tuples."""
    from pykern import pkio
    from pykern import pkunit
    import zipfile

    res = []
    for path in pkio.sorted_glob(pkunit.data_dir().join('*.zip')):
        with zipfile.ZipFile(str(path)) as z:
            # run.py is generated on import, so include it in the expectation
            expect = sorted(z.namelist() + ['run.py'])
        imported = fc.sr_post_form(
            'importFile',
            {
                'file': (open(str(path), 'rb'), path.basename),
                'folder': '/exporter_test',
            },
        )
        res.append((imported.simulationType, imported.models.simulation.name, expect))
    return res
def test_filename():
    """pkresource.filename resolves names relative to the caller's package."""
    data_d = pkunit.data_dir()
    t1 = importlib.import_module(data_d.basename + '.t1')
    assert t1.somefile().startswith('anything'), \
        'When somefile is called, it should return the "anything" file'
    # peel path components off the resolved resource name, right to left
    parts = [pkresource.filename('test.yml', pkresource)]

    def _tail():
        (parts[0], tail) = os.path.split(parts[0])
        return tail

    assert 'test.yml' == _tail(), \
        'nth of resource name is name passed to pkresource'
    assert 'package_data' == _tail(), \
        'n-1th resource is always "package_data"'
    assert 'pykern' == _tail(), \
        'n-2th resource is root package of passed in context'
    with pytest.raises(IOError):
        # Should not find somefile, because that's in a different context
        pkresource.filename('somefile', pkresource)
    assert pkresource.filename('somefile', t1.somefile), \
        'Given any object, should fine resource in root package of that object'
def test_generate_python():
    """Fetch pythonSource for srw examples and diff against fixtures."""
    from pykern import pkio
    from pykern.pkunit import pkeq
    from sirepo.template import srw
    from sirepo import srunit

    fc = srunit.flask_client()
    fc.get('/{}'.format(srw.SIM_TYPE))
    for name in (
        'NSLS-II CHX beamline',
        'Sample from Image',
        'Boron Fiber (CRL with 3 lenses)',
        'Tabulated Undulator Example',
        'Gaussian X-ray beam through a Beamline containing Imperfect Mirrors',
        'NSLS-II SRX beamline',
        'NSLS-II ESM beamline',
        'Mask example',
        'NSLS-II SMI beamline',
    ):
        sim = fc.sr_sim_data(srw.SIM_TYPE, name)
        resp = fc.sr_get(
            'pythonSource',
            {
                'simulation_id': sim['models']['simulation']['simulationId'],
                'simulation_type': srw.SIM_TYPE,
            },
            raw_response=True,
        )
        filename = '{}.py'.format(name)
        # keep a copy in the work dir for manual inspection on failure
        with open(str(pkunit.work_dir().join(filename)), 'wb') as out:
            out.write(resp.data)
        pkeq(pkio.read_text(pkunit.data_dir().join(filename)), resp.data)
def test_importer():
    """Import elegant files and compare generated lattice/commands to fixtures.

    Fix: the final comparison was ``if expect != actual: assert False``,
    which fails with no indication of which file differed. Raise the
    assertion with the offending filename instead; the full diff is still
    deliberately avoided (see TODO below) because it can be very large.
    """
    from sirepo.template import elegant

    with pkunit.save_chdir_work():
        for filename in _FILES:
            error, data = elegant.import_file(TestFlaskRequest(filename))
            outfile = '{}.txt'.format(filename)
            if error:
                actual = error
            else:
                if '.lte' in filename:
                    data['models']['commands'] = []
                    actual = '{}{}'.format(
                        elegant._generate_variables(data),
                        elegant.generate_lattice(
                            data,
                            elegant._build_filename_map(data),
                            elegant._build_beamline_map(data),
                            {}))
                else:
                    err2, data2 = elegant.import_file(
                        TestFlaskRequest('{}.lte'.format(filename)), test_data=data)
                    actual = elegant._generate_commands(
                        data2,
                        elegant._build_filename_map(data2),
                        elegant._build_beamline_map(data2),
                        {})
            pkio.write_text(outfile, actual)
            expect = pkio.read_text(pkunit.data_dir().join(outfile))
            #TODO(pjm): this takes too long if there are a lot of diffs
            #assert expect == actual
            if expect != actual:
                assert False, '{}: expect != actual'.format(outfile)
def test_importer():
    """Import every canned SRW script and compare the parsed object to json.

    Fixes: the first assertion's message said "should import with an error"
    although the condition requires that there be NO error; the message is
    corrected. The second, identical ``assert not error`` (which could never
    fire after the first) is removed.
    """
    from sirepo.importer import import_python

    dat_dir = py.path.local(pkresource.filename('static/dat/', import_python))
    with pkunit.save_chdir_work():
        work_dir = py.path.local('.')
        # seed the work dir with the mirror data the scripts read
        for f in glob.glob(str(dat_dir.join('mirror_*d.dat'))):
            py.path.local(f).copy(work_dir)
        for b in sorted(_TESTS.keys()):
            base_py = '{}.py'.format(_TESTS[b][0])
            code = pkio.read_text(pkunit.data_dir().join(base_py))
            error, actual = import_python(
                code,
                tmp_dir=str(work_dir),
                lib_dir=str(work_dir),
                user_filename=r'c:\anything\{}.anysuffix'.format(_TESTS[b][0]),
                arguments=_TESTS[b][1],
            )
            assert not error, \
                '{}: should import without error: {}'.format(base_py, error)
            # version changes per run; neutralize before comparing
            actual['version'] = 'IGNORE-VALUE'
            pkunit.assert_object_with_json(b, actual)
def test_1():
    """Check SRW spectrum and trajectory output files and plot the results.

    Python 3 fix: ``map()`` returns a one-shot iterator, so the original
    code exhausted it at the first ``max()``/``min()`` call; the subsequent
    ``.index()``, ``maxelements`` and repeated ``min``/``max`` calls would
    then fail or see an empty sequence. The mapped values are materialized
    with ``list()`` once up front.
    """
    d = pkunit.data_dir()
    ## Testing actual SRW calculations
    ##Reading SRW data SPECTRUM
    IFileName = "Spectrum.txt"
    f = open(str(d.join(IFileName)), "r")  #,1000)
    e_p = []
    I_rad = []
    for line in f.readlines():
        words = line.split()
        e_p.append(words[0])
        I_rad.append(words[1])
    # list() so the sequence can be scanned more than once (py3 map is lazy)
    I_radf = list(map(float, I_rad))
    maxI = max(I_radf)
    pkdc(I_radf)
    print('Spectral Amplitude, ph/s/mrad2', maxI)
    pkdc(I_radf.index(max(I_radf)))
    maxIn = maxelements(I_radf)
    (maxV, maxI) = FindingArrayMaxima(I_radf, 5)
    print(maxI, maxV)
    f.close()
    ##Reading SRW data TRAJECTORY
    IFileName = "Trajectory.txt"
    f = open(str(d.join(IFileName)), "r")  #,10000)
    z_dist = []
    x_traj = []
    for line in f.readlines():
        words = line.split()
        z_dist.append(words[0])
        x_traj.append(words[1])
    x_trajectory = list(map(float, x_traj))
    z_distance = list(map(float, z_dist))
    minX = min(x_trajectory)
    maxX = max(x_trajectory)
    minZ = min(z_distance)
    maxZ = max(z_distance)
    print('Length of ID, m', maxZ - minZ)
    print('Oscillation Amplitude, mm', (maxX - minX) / 2)
    L_trajectory = Path_Length(z_distance, x_trajectory)
    print('Length of Trajectory, m', L_trajectory)
    f.close()
    ##Plotting
    plot(e_p, I_rad)
    j = 0
    for i in maxI:
        plt.scatter(e_p[i], maxV[j], color='red')
        j = j + 1
    # title(TitleP)
    # xlabel(Xlab)
    # ylabel(Ylab)
    grid()
    plt.show()
    plot(z_dist, x_trajectory, '.b', linestyle="-")
    # x_trajectory is already a list of floats; no second map() needed
    (maxVt, maxIt) = FindingArrayMaxima(x_trajectory, 20)
    pkdc(maxIt, maxVt)
    j = 0
    for i in maxIt:
        plt.scatter(z_dist[i], maxVt[j], color='red')
        j = j + 1
    grid()
    plt.show()
def test_load_file():
    """Test values are unicode"""
    _assert_unicode(pkyaml.load_file(pkunit.data_dir().join('conf1.yml')))
def _before_request(fc):
    """Seed the beaker session store with the canned container file."""
    # beaker nests container files by one- and two-char filename prefixes
    dst = srunit.server.app.sirepo_db_dir.join(
        'beaker', 'container_file', filename[0:1], filename[0:2], filename)
    pkio.mkdir_parent_only(dst)
    shutil.copy(str(pkunit.data_dir().join(filename)), str(dst))
def test_run_path_as_module():
    """run_path_as_module returns a module whose functions are callable."""
    mod = pkrunpy.run_path_as_module(pkunit.data_dir().join('f1.py'))
    assert mod.func1() == sys.modules, \
        'When imported, should be able to call function within module'
def read(self):
    """Return the contents of ``self.filename`` from the test data dir."""
    path = str(pkunit.data_dir().join(self.filename))
    with open(path) as fh:
        return fh.read()
def ReadYaml(filename):
    """Load and return the parsed YAML file from the test data dir."""
    return pkyaml.load_file(pkunit.data_dir().join(filename))