def test_json_load_any():
    """Validate json_load_any()"""
    import json

    serialized = json.dumps({'a': 33})
    parsed = pkcollections.json_load_any(serialized)
    # Attribute access works because json_load_any returns a dict subclass.
    pkok(
        33 == parsed.a,
        '{}: j2.a is not 33',
        parsed.a,
    )
    # Key 'values' collides with a dict method name, but loading must succeed.
    serialized = json.dumps({'a': 33, 'b': {'values': 'will collide, but ok'}})
    parsed = pkcollections.json_load_any(serialized)
    pkcollections.json_load_any(serialized, object_pairs_hook=pkcollections.Dict)
def test_json_load_any():
    """Validate json_load_any()"""
    import json

    src = json.dumps({'a': 33})
    loaded = pkcollections.json_load_any(src)
    # Result supports attribute access on keys.
    pkeq(
        33,
        loaded.a,
        '{}: j2.a is not 33',
        loaded.a,
    )
    # 'values' collides with a dict method name; load must still succeed.
    src = json.dumps({'a': 33, 'b': {'values': 'will collide, but ok'}})
    loaded = pkcollections.json_load_any(src)
    pkcollections.json_load_any(src, object_pairs_hook=pkcollections.Dict)
def test_prepare_aux_files():
    """Copy lib files for an SRW tabulated-undulator simulation."""
    from sirepo.template import template_common
    from pykern.pkdebug import pkdp
    from pykern import pkcollections
    import sirepo.auth
    import sirepo.auth.guest

    # A logged-in user is required before simulation_db may be touched.
    sirepo.auth.login(sirepo.auth.guest)
    # Needed to initialize simulation_db
    data = pkcollections.json_load_any('''{ "simulationType": "srw", "models": { "simulation": { "sourceType": "t" }, "tabulatedUndulator": { "undulatorType": "u_t", "magneticFile": "magnetic_measurements.zip" }, "beamline": { } }, "report": "intensityReport" }''')
    work = pkunit.work_dir()
    template_common.copy_lib_files(data, None, work)
def _do(file_ext, parse):
    """Import every data-dir file of ``file_ext`` three times and check names."""
    from pykern import pkio
    from pykern import pkunit
    from pykern import pkcollections
    from pykern.pkdebug import pkdp
    from pykern.pkunit import pkeq, pkfail, pkok
    from sirepo import srunit
    import re

    fc = srunit.flask_client(sim_types='srw:myapp')
    fc.sr_login_as_guest()
    # Re-importing the same file appends ' 2', ' 3' to the simulation name.
    for suffix in '', ' 2', ' 3':
        for path in pkio.sorted_glob(pkunit.data_dir().join('*.' + file_ext)):
            body, stream = parse(path)
            sim_type = re.search(r'^([a-z]+)_', path.basename).group(1)
            fc.sr_get_root(sim_type)
            deviance = 'deviance' in path.basename
            if not deviance:
                sim_name = pkcollections.json_load_any(body).models.simulation.name
            res = fc.sr_post_form(
                'importFile',
                {
                    'file': (stream, path.basename),
                    'folder': '/importer_test',
                },
                {'simulation_type': sim_type},
            )
            if deviance:
                # Deviance cases may embed the expected error in the payload.
                m = re.search(r'Error: (.+)', body)
                if m:
                    pkeq(m.group(1), res.error)
                continue
            pkeq(sim_name + suffix, res.models.simulation.name)
def t():
    """Exercise srw.prepare_aux_files with a tabulated undulator model."""
    from sirepo.template import srw
    from pykern import pkcollections

    # Needed to initialize simulation_db
    data = pkcollections.json_load_any('''{ "models": { "simulation": { "sourceType": "t" }, "tabulatedUndulator": { "magneticFile": "magnetic_measurements.zip", "indexFile": "", "magnMeasFolder": "" }, "beamline": { } }, "report": "intensityReport" }''')
    work = pkunit.empty_work_dir()
    srw.prepare_aux_files(work, data)
    undulator = data.models.tabulatedUndulator
    # prepare_aux_files is expected to fill in folder and index file name.
    assert undulator.magnMeasFolder == './'
    assert undulator.indexFileName == 'ivu21_srx_sum.txt'
    assert work.join(undulator.indexFileName).exists()
def _do(file_ext, parse):
    """Import every data-dir file of ``file_ext`` three times and check names."""
    from pykern import pkio
    from pykern import pkunit
    from pykern import pkcollections
    from pykern.pkdebug import pkdp
    from pykern.pkunit import pkeq, pkfail, pkok
    from sirepo import srunit
    import re

    fc = srunit.flask_client()
    # Duplicate imports get ' (2)', ' (3)' appended to the simulation name.
    for suffix in '', ' (2)', ' (3)':
        for path in pkio.sorted_glob(pkunit.data_dir().join('*.' + file_ext)):
            body, stream = parse(path)
            sim_type = re.search(r'^([a-z]+)_', path.basename).group(1)
            fc.get('/{}'.format(sim_type))
            deviance = 'deviance' in path.basename
            if not deviance:
                sim_name = pkcollections.json_load_any(body).models.simulation.name
            res = fc.sr_post_form(
                'importFile',
                {
                    'file': (stream, path.basename),
                    'folder': '/importer_test',
                },
                {'simulation_type': sim_type},
            )
            if deviance:
                # Deviance cases may embed the expected error in the payload.
                m = re.search(r'Error: (.+)', body)
                if m:
                    pkeq(m.group(1), res.error)
                continue
            pkeq(sim_name + suffix, res.models.simulation.name)
def test_force_login():
    """Logged-out requests must get a login srException; login restores access."""
    fc, sim_type = _fc()
    from pykern import pkcollections
    from pykern import pkconfig, pkunit, pkio
    from pykern.pkdebug import pkdp
    from pykern.pkunit import pkok, pkre, pkeq
    from sirepo import http_reply
    import re

    # login as a new user, not in db
    resp = fc.sr_post('authEmailLogin', {'email': '[email protected]', 'simulationType': sim_type})
    fc.get(resp.url)
    fc.sr_get('authLogout', {'simulation_type': sim_type})
    # While logged out, listSimulations must return an SRException redirecting to login.
    resp = fc.sr_post('listSimulations', {'simulationType': sim_type}, raw_response=True)
    pkeq(http_reply.SR_EXCEPTION_STATUS, resp.status_code)
    reply = pkcollections.json_load_any(resp.data)
    pkeq(http_reply.SR_EXCEPTION_STATE, reply.state)
    pkeq('login', reply.srException.routeName)
    # Log back in; the same request now succeeds with one default simulation.
    resp = fc.sr_post('authEmailLogin', {'email': '[email protected]', 'simulationType': sim_type})
    fc.get(resp.url)
    reply = fc.sr_post('listSimulations', {'simulationType': sim_type})
    pkeq(1, len(reply))
def _do(fc, file_ext, parse):
    """Import every data-dir file of ``file_ext`` and verify resulting sim names."""
    from pykern.pkcollections import PKDict
    from pykern import pkio, pkcompat
    from pykern import pkunit
    from pykern import pkcollections
    from pykern.pkdebug import pkdp, pkdlog
    from pykern.pkunit import pkeq, pkfail, pkok, pkre
    import re

    # Python source imports keep their name; other types get ' 2', ' 3' suffixes.
    suffixes = ('',) if file_ext == 'py' else ('', ' 2', ' 3')
    for suffix in suffixes:
        for f in pkio.sorted_glob(pkunit.data_dir().join('*.' + file_ext)):
            pkdlog('file={}', f)
            body = pkcompat.from_bytes(parse(f))
            sim_type = re.search(r'^([a-z]+)_', f.basename).group(1)
            fc.sr_get_root(sim_type)
            res = fc.sr_post_form(
                'importFile',
                PKDict(folder='/importer_test'),
                PKDict(simulation_type=sim_type),
                file=f,
            )
            if 'deviance' in f.basename:
                # Deviance cases may embed the expected error pattern in the payload.
                m = re.search(r'Error: (.+)', body)
                if m:
                    pkre(m.group(1), res.error)
                continue
            elif file_ext == 'py':
                sim_name = f.purebasename
            else:
                sim_name = pkcollections.json_load_any(body).models.simulation.name
            assert 'models' in res, \
                f'file={f} res={res}'
            pkeq(sim_name + suffix, res.models.simulation.name)
def test_in_request(op, cfg=None, before_request=None, headers=None, want_cookie=True, want_user=True, **kwargs):
    """Run ``op`` inside a server request via the srunit test route.

    The op is attached to the flask app, invoked through uri_router's
    srunit endpoint, and the JSON reply is checked for state 'ok'.
    """
    fc = flask_client(cfg, **kwargs)
    try:
        from pykern import pkunit
        from pykern import pkcollections

        if before_request:
            before_request(fc)
        # The server looks for this attribute to run our op in-request.
        setattr(
            server._app,
            server.SRUNIT_TEST_IN_REQUEST,
            PKDict(op=op, want_cookie=want_cookie, want_user=want_user),
        )
        from sirepo import uri_router
        resp = fc.get(uri_router.srunit_uri, headers=headers)
        pkunit.pkeq(200, resp.status_code, 'FAIL: status={}', resp.status)
        if resp.mimetype == 'text/html':
            # An HTML body means a redirect or error page, never success.
            match = _JAVASCRIPT_REDIRECT_RE.search(resp.data)
            if match:
                pkunit.pkfail('redirect={}', match.group(1))
            pkunit.pkfail('other html response={}', resp.data)
        reply = pkcollections.json_load_any(resp.data)
        pkunit.pkeq('ok', reply.get('state'), 'FAIL: data={}', reply)
    finally:
        # Always detach the op so later requests don't re-run it.
        try:
            delattr(server._app, server.SRUNIT_TEST_IN_REQUEST)
        except AttributeError:
            pass
    return resp
def test_importer(import_req):
    """Import each zgoubi .dat file and diff generated source against data dir."""
    from pykern import pkcollections
    from pykern import pkjson
    from pykern.pkunit import pkeq
    from sirepo.template import zgoubi
    import sirepo.sim_data

    with pkunit.save_chdir_work() as work:
        for fn in pkio.sorted_glob(pkunit.data_dir().join('*.dat')):
            error = None
            try:
                data = zgoubi.import_file(import_req(fn), unit_test_mode=True)
                sirepo.sim_data.get_class('zgoubi').fixup_old_data(data)
                #TODO(pjm): easier way to convert nested dict to pkcollections.Dict?
                data = pkcollections.json_load_any(pkjson.dump_pretty(data))
            except Exception as e:
                pkdlog(pkdexc())
                # NOTE(review): e.message is Python 2 only — confirm runtime version
                error = e.message
            # On failure the error text itself is what gets diffed.
            actual = error if error else zgoubi.python_source_for_model(data)
            outfile = fn.basename + '.txt'
            pkio.write_text(outfile, actual)
            expect_file = pkunit.data_dir().join(outfile)
            pkeq(pkio.read_text(expect_file), actual, 'diff {} {}', expect_file, work.join(outfile))
def test_json_load_any():
    """Validate json_load_any()"""
    import json
    from pykern import pkcollections
    from pykern.pkunit import pkeq

    doc = json.dumps({'a': 33})
    loaded = pkcollections.json_load_any(doc)
    # Keys are reachable as attributes on the loaded object.
    pkeq(
        33,
        loaded.a,
        '{}: j2.a is not 33',
        loaded.a,
    )
    # 'values' collides with a dict method name; loading must still work.
    doc = json.dumps({'a': 33, 'b': {'values': 'will collide, but ok'}})
    loaded = pkcollections.json_load_any(doc)
    pkcollections.json_load_any(doc, object_pairs_hook=pkcollections.PKDict)
def load_any(obj):
    """Parse JSON via `pkcollections.json_load_any`

    Args:
        obj (object): str or object with "read"

    Returns:
        object: parsed JSON
    """
    from pykern import pkcollections

    return pkcollections.json_load_any(obj)
def __create(cls, req):
    """Build an instance from the persisted db record, or a fresh one.

    A job left running/pending in the db is marked canceled, since no
    live process is tracked for it.
    """
    try:
        db = pkcollections.json_load_any(
            cls.__db_file(req.content.computeJid),
        )
        #TODO(robnagler) when we reconnect with running processes at startup,
        # we'll need to change this
        if db.status in _RUNNING_PENDING:
            db.status = job.CANCELED
        return cls(req, db=db)
    except Exception as e:
        # No db file yet: create the record and write it out.
        if pykern.pkio.exception_is_not_found(e):
            return cls(req).__db_write()
        raise
def _purge_sim(db_file):
    """Remove the run dir for one stale free-user job and mark it purged."""
    rec = pkcollections.json_load_any(db_file)
    # OPTIMIZATION: We assume the uids_of_paid_users doesn't change very
    # frequently so we don't need to check again. A user could run a sim
    # at anytime so we need to check that they haven't
    if rec.lastUpdateTime > _too_old:
        return
    if rec.status == job.FREE_USER_PURGED:
        # Already purged on a previous sweep; nothing to do.
        return
    run_dir = sirepo.simulation_db.simulation_run_dir(rec)
    pkio.unchecked_remove(run_dir)
    rec.status = job.FREE_USER_PURGED
    cls.__db_write_file(rec)
    jids_purged.append(db_file.purebasename)
def __db_load(cls, compute_jid):
    """Load the db record for ``compute_jid``, defaulting newer fields to None."""
    rec = pkcollections.json_load_any(
        cls.__db_file(compute_jid),
    )
    # Fields added after older records were written: backfill with None
    # on the record and on every history entry.
    for field in (
        'alert',
        'cancelledAfterSecs',
        'isPremiumUser',
        'jobStatusMessage',
        'internalError',
    ):
        rec.setdefault(field, None)
        for entry in rec.history:
            entry.setdefault(field, None)
    return rec
def add_code(name, version, uri, source_d, virtual_env=None, pyenv=None):
    """Add a new code to ~?rsmanifest.json

    Args:
        name (str): name of the package
        version (str): commit or version
        uri (str): repo, source link
        source_d (str): directory containing
        virtual_env (str): DEPRECATED
        pyenv (str): pyenv version
    """
    from pykern import pkcollections
    from pykern import pkio
    from pykern import pkjson
    import datetime

    fn = pkio.py_path(USER_FILE)
    try:
        values = pkcollections.json_load_any(fn)
    except Exception as e:
        # Missing or unparsable manifest: start a fresh one.
        # Any other error is unexpected and must propagate.
        if not (pkio.exception_is_not_found(e) or isinstance(e, ValueError)):
            raise
        values = pkcollections.Dict(
            version=FILE_VERSION,
            codes=pkcollections.Dict({_NO_PYENV: pkcollections.Dict()}),
        )
    if pyenv:
        assert not virtual_env, \
            'only one of pyenv or virtual-env (DEPRECATED)'
    elif virtual_env:
        # NOTE: pyenv is known falsy here, so this assert cannot fire;
        # kept for symmetry with the branch above.
        assert not pyenv, \
            'only one of pyenv or virtual-env (DEPRECATED)'
        pyenv = virtual_env
    if not pyenv:
        pyenv = _NO_PYENV
    v = values.codes.get(pyenv) or pkcollections.Dict()
    v[name.lower()] = pkcollections.Dict(
        installed=datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ'),
        name=name,
        source_d=source_d,
        uri=uri,
        version=version,
    )
    values.codes[pyenv] = v
    pkjson.dump_pretty(values, filename=fn)
def __db_load(cls, compute_jid):
    """Load the db record for ``compute_jid``, backfilling newer fields."""
    rec = pkcollections.json_load_any(
        cls.__db_file(compute_jid),
    )
    # Fields added after older records were written: default to None on
    # the record and on every history entry.
    for field in (
        'alert',
        'cancelledAfterSecs',
        'isPremiumUser',
        'jobStatusMessage',
        'internalError',
    ):
        rec.setdefault(field, None)
        for entry in rec.history:
            entry.setdefault(field, None)
    # computeModel was added later; derive it from the jid when absent.
    rec.pksetdefault(
        computeModel=lambda: sirepo.sim_data.split_jid(compute_jid).compute_model,
    )
    return rec
def sr_auth_state(self, **kwargs):
    """Gets authState and parses

    Any keyword arguments are asserted against the parsed state.

    Returns:
        dict: parsed auth_state
    """
    # authState is embedded in a page; pull out the JSON object literal.
    match = re.search(r'(\{.*\})', self.sr_get('authState').data)
    state = pkcollections.json_load_any(match.group(1))
    for key, expect in kwargs.items():
        pkunit.pkeq(
            expect,
            state[key],
            'key={} expected={} != actual={}: auth_state={}',
            key,
            expect,
            state[key],
            state,
        )
    return state
def __db_load(cls, compute_jid):
    """Load the db record for ``compute_jid``, backfilling and migrating fields.

    Args:
        compute_jid (str): job id whose db file to read

    Returns:
        object: parsed record with newer fields defaulted and the
        misspelled legacy key ``cancelledAfterSecs`` migrated to
        ``canceledAfterSecs`` on the record and its history entries.
    """
    v = None
    f = cls.__db_file(compute_jid)
    d = pkcollections.json_load_any(f)
    # Fields added after older records were written: default to None on
    # the record and on every history entry.
    for k in [
        'alert',
        'canceledAfterSecs',
        'isPremiumUser',
        'jobStatusMessage',
        'internalError',
    ]:
        d.setdefault(k, v)
        for h in d.history:
            h.setdefault(k, v)
    d.pksetdefault(
        computeModel=lambda: sirepo.sim_data.split_jid(compute_jid).
        compute_model,
        dbUpdateTime=lambda: f.mtime(),
    )
    if 'cancelledAfterSecs' in d:
        d.canceledAfterSecs = d.pkdel('cancelledAfterSecs', default=v)
        for h in d.history:
            # BUG FIX: was d.pkdel, which always returned the default after
            # the first deletion above and left the stale misspelled key on
            # every history entry; each entry must pop its own value.
            h.canceledAfterSecs = h.pkdel('cancelledAfterSecs', default=v)
    return d
def _req(route_name, params, query, op, raw_response):
    """Make request and parse result

    Args:
        route_name (str): string name of route
        params (dict): parameters to apply to route
        op (func): how to request

    Returns:
        object: parsed JSON result
    """
    from sirepo import simulation_db

    uri = None
    resp = None
    try:
        uri = _uri(route_name, params, query)
        resp = op(uri)
        # Callers that need status codes/headers get the raw response back.
        return resp if raw_response else pkcollections.json_load_any(resp.data)
    except Exception as e:
        pkdlog('Exception: {}: msg={} uri={} resp={}', type(e), e, uri, resp)
        raise
def json_load(*args, **kwargs):
    """Parse JSON via `pkcollections.json_load_any`, passing args through."""
    #TODO(robnagler) see https://github.com/radiasoft/sirepo/issues/379
    # Should work to use pkcollections.Dict
    #kwargs['object_pairs_hook'] = dict
    return pkcollections.json_load_any(*args, **kwargs)
def json_load(*args, **kwargs):
    """Parse JSON, delegating all arguments to `pkcollections.json_load_any`."""
    return pkcollections.json_load_any(*args, **kwargs)
def __init__(self, mgr=None):
    """Build the Radia viewer UI: dropdowns, field-path inputs, solver controls.

    Args:
        mgr (RadiaGeomMgr): geometry manager; a new one is created when None
    """
    self.model_data = {}
    self.mgr = radia_tk.RadiaGeomMgr() if mgr is None else mgr
    self.on_displayed(self._radia_displayed)
    self.vtk_viewer = vtk_viewer.Viewer()
    #TODO(mvk): build view from this schema
    self.schema = pkcollections.json_load_any(
        pkio.py_path(pkresource.filename('schema.json')),
    )
    # --- view / field / path type selectors ---
    self.view_type_list = ipywidgets.Dropdown(
        layout={'width': 'max-content'},
        options=VIEW_TYPES,
    )
    self.view_type_list.observe(self._update_viewer, names='value')
    view_type_list_grp = _label_grp(self.view_type_list, 'View')
    self.field_type_list = ipywidgets.Dropdown(
        layout={'width': 'max-content'},
        options=radia_tk.FIELD_TYPES,
    )
    self.field_type_list.observe(self._update_viewer, names='value')
    field_type_list_grp = _label_grp(self.field_type_list, 'Field')
    self.path_type_list = ipywidgets.Dropdown(
        layout={'width': 'max-content'},
        options=PATH_TYPES,
    )
    self.path_type_list.observe(self._update_viewer, names='value')
    self.path_type_list_grp = _label_grp(self.path_type_list, 'Path')
    # behavior changes depending on path type chosen
    self.new_field_point_btn = ipywidgets.Button(
        description='+',
        layout={'width': 'fit-content'},
    )
    self.new_field_point_btn.on_click(self._add_field_point)
    # --- line path inputs: begin/end coordinates and point count ---
    self.line_begin_pt_flds, line_begin_point_coords_grp = _coord_grp(
        [-10, 0, 0], {'width': '64px'})
    line_begin_grp = _label_grp(line_begin_point_coords_grp, 'Begin')
    self.line_end_pt_flds, line_end_point_coords_grp = _coord_grp(
        [10, 0, 0], {'width': '64px'})
    line_end_grp = _label_grp(line_end_point_coords_grp, 'End')
    self.path_num_pts = ipywidgets.IntText(value=10, min=2, max=100, step=1,
        layout={'width': '48px'})
    num_pts_grp = _label_grp(self.path_num_pts, 'Num Points')
    self.line_grp = ipywidgets.HBox([
        line_begin_grp, line_end_grp, num_pts_grp, self.new_field_point_btn
    ], layout={'padding': '0 6px 0 0'})
    # --- circle path inputs: center, radius, orientation angles ---
    self.circle_ctr_flds, circle_ctr_coords_grp = _coord_grp(
        [0, 0, 0], {'width': '64px'})
    circle_ctr_grp = _label_grp(circle_ctr_coords_grp, 'Center')
    self.circle_radius = ipywidgets.BoundedFloatText(
        min=0.1, max=1000, value=10.0, layout={'width': '48px'})
    circle_radius_grp = _label_grp(self.circle_radius, 'Radius')
    self.circle_theta = ipywidgets.BoundedFloatText(
        min=-math.pi, max=math.pi, step=0.1, value=0.0,
        layout={'width': '48px'})
    circle_theta_grp = _label_grp(self.circle_theta, '𝞱')
    self.circle_phi = ipywidgets.BoundedFloatText(min=-math.pi, max=math.pi,
        step=0.1, value=0.0, layout={'width': '48px'})
    circle_phi_grp = _label_grp(self.circle_phi, '𝞿')
    self.circle_grp = ipywidgets.HBox([
        circle_ctr_grp, circle_radius_grp, circle_theta_grp,
        circle_phi_grp, num_pts_grp, self.new_field_point_btn
    ], layout={'padding': '0 6px 0 0'})
    # --- file-based path input ---
    #self.pt_file_btn = ipywidgets.FileUpload()
    self.pt_file_btn = ipywidgets.Button(description='Choose',
        layout={'width': 'fit-content'})
    self.pt_file_btn.on_click(self._upload)
    self.pt_file_label = ipywidgets.Label('<None>')
    self.pt_file_label.add_class('rs-file-input-label')
    # 'file_data' is presumably a traitlet on this widget class — TODO confirm
    self.observe(self._data_loaded, names='file_data')
    self.pt_file_grp = ipywidgets.HBox(
        [self.pt_file_btn, self.pt_file_label, self.new_field_point_btn],
        layout={'padding': '0 6px 0 0'})
    self.geom_list = ipywidgets.Dropdown(
        layout={'width': 'max-content'},
        options=[n for n in self.mgr.get_geoms()])
    self.geom_list.observe(self._set_current_geom, names='value')
    geom_list_grp = _label_grp(self.geom_list, 'Geometry')
    self.field_color_map_list = ipywidgets.Dropdown(
        layout={'width': 'max-content'},
    )
    # the options/value of a dropdown are not syncable!  We'll work around it
    self.field_color_map_list.observe(self._set_field_color_map,
        names='value')
    field_map_grp = _label_grp(self.field_color_map_list, 'Color Map')
    field_map_grp.layout = ipywidgets.Layout(padding='0 6px 0 0')
    self.vector_scaling_list = ipywidgets.Dropdown(
        layout={'width': 'max-content'},
    )
    self.vector_scaling_list.observe(self._set_vector_scaling, names='value')
    vector_scaling_grp = _label_grp(self.vector_scaling_list, 'Scaling')
    self.new_field_pt_flds, new_field_point_coords_grp = _coord_grp(
        [0, 0, 0])
    self.new_field_point_grp = ipywidgets.HBox(
        [new_field_point_coords_grp, self.new_field_point_btn],
        layout={'padding': '0 6px 0 0'})
    # Button action is swapped to match the selected path type (point,
    # line, circle, or file).
    self.new_field_point_btn_actions = [
        self._add_field_point, self._add_field_line,
        self._add_field_circle, self._add_field_file
    ]
    self.vector_props_grp = ipywidgets.HBox(
        [field_map_grp, vector_scaling_grp])
    self.vector_grp = ipywidgets.HBox([
        field_type_list_grp,
        self.path_type_list_grp,
        self.line_grp,
        self.circle_grp,
        self.new_field_point_grp,
        self.pt_file_grp,
    ])
    geom_grp = ipywidgets.HBox(
        [geom_list_grp, view_type_list_grp, self.vector_props_grp],
        layout={'padding': '3px 0px 3px 0px'})
    # --- solver controls ---
    self.solve_prec = ipywidgets.BoundedFloatText(
        value=0.0001, min=1e-06, max=10.0, step=1e-06,
        layout={'width': '72px'},
    )
    solve_prec_grp = _label_grp(
        self.solve_prec,
        'Precision (' + radia_tk.FIELD_UNITS[radia_tk.FIELD_TYPE_MAG_B] + ')')
    self.solve_max_iter = ipywidgets.BoundedIntText(
        value=1500, min=1, max=1e6, step=100,
        layout={'width': '72px'},
    )
    solve_max_iter_grp = _label_grp(self.solve_max_iter, 'Max Iterations')
    self.solve_method = ipywidgets.Dropdown(
        layout={'width': 'max-content'},
        value=0,
        options=[('0', 0), ('3', 3), ('4', 4), ('5', 5)])
    solve_method_grp = _label_grp(self.solve_method, 'Method', layout={})
    self.solve_btn = ipywidgets.Button(
        description='Solve',
        layout={'width': 'fit-content'},
    )
    self.solve_btn.on_click(self._solve)
    # Spinner gif shown while a solve is in progress; hidden initially.
    spnr = pkio.read_binary(pkresource.filename('sirepo_animated.gif'))
    self.solve_spinner = ipywidgets.Image(value=spnr, format='gif',
        width=24, height=24)
    self.solve_spinner.layout.display = 'none'
    self.solve_res_label = ipywidgets.Label()
    #self.export_btn = ipywidgets.Button(
    #    description='Export',
    #    layout={'width': 'fit-content'},
    #)
    #self.export_btn.on_click(self._export)
    #self.export_link = ipywidgets.HTML(
    #    value='<a href="#" download="xxx">Export</a>'
    #)
    #self.export_link.add_class('radia-file-output')
    self.reset_btn = ipywidgets.Button(
        description='Reset',
        layout={'width': 'fit-content'},
    )
    self.reset_btn.on_click(self._reset)
    solve_grp = ipywidgets.HBox(
        [
            solve_prec_grp, solve_max_iter_grp, solve_method_grp,
            self.solve_btn, self.solve_spinner, self.solve_res_label,
            #self.export_btn,
            #self.export_link
        ],
        layout={'padding': '3px 0px 3px 0px'})
    # for enabling/disabling as a whole
    self.controls = [
        self.field_color_map_list,
        self.new_field_point_btn,
        self.field_type_list,
        self.path_type_list,
        self.solve_btn,
        self.solve_method,
        self.solve_max_iter,
        self.solve_prec,
        self.vector_scaling_list,
        self.view_type_list,
    ]
    # NOTE(review): controls_grp is built but not added to children below —
    # confirm whether it is intentionally unused.
    controls_grp = ipywidgets.VBox([geom_grp, solve_grp],
        layout={'padding': '8px 4px 4px 4px'})
    self.observe(self._set_client_props, names='client_props')
    # self.out is presumably defined on the base/widget class — TODO confirm
    super(RadiaViewer, self).__init__(children=[
        self.vtk_viewer, geom_grp, self.vector_grp, solve_grp, self.out
    ])