def _generate_lattice(util, code_var, beamline_id):
    """Render the full OPAL lattice text: element definitions, sub-beamlines,
    then the top-level LINE statement for ``beamline_id``."""
    element_text = util.render_lattice(
        util.iterate_models(
            OpalElementIterator(None, _format_field_value),
            'elements',
        ).result,
        want_semicolon=True,
    )
    element_names = []
    beamline_text = _generate_beamline(
        util, code_var, PKDict(), beamline_id, 0, element_names,
    )[0]
    line_stmt = '{}: LINE=({});\n'.format(
        util.id_map[beamline_id].name,
        ','.join(element_names),
    )
    return element_text + '\n' + beamline_text + line_stmt
def _generate_data(g_id, in_data, add_lines=True):
    """Generate view data (objects and/or fields) for a Radia geometry.

    Args:
        g_id (int): Radia geometry id
        in_data (PKDict): request data (name, viewType, fieldType, fieldPaths)
        add_lines (bool): when building field data, also overlay object outlines
    Returns:
        PKDict: object or field data, or PKDict(error=...) on Radia failure
    """
    try:
        o = _generate_obj_data(g_id, in_data.name)
        if in_data.viewType == _SCHEMA.constants.viewTypeObjects:
            return o
        elif in_data.viewType == _SCHEMA.constants.viewTypeFields:
            g = _generate_field_data(
                g_id, in_data.name, in_data.fieldType, in_data.get('fieldPaths', None)
            )
            if add_lines:
                _add_obj_lines(g, o)
            return g
    except RuntimeError as e:
        # Python 3 exceptions have no .message attribute; the original
        # e.message raised AttributeError and hid the real Radia error.
        pkdc('Radia error {}', str(e))
        return PKDict(error=str(e))
def _view_physics_Gravity_GravityMain(self, schema):
    """Build the basic-field view for physics/Gravity/GravityMain."""
    # http://flash.uchicago.edu/site/flashcode/user_support/rpDoc_4p2.py?submit=rp_Gravity.txt
    fields = ['useGravity']
    if 'physics_Gravity' in schema.model:
        # Flash docs seem to be wrong. useGravity does not exist in
        # physics/Gravity. Just physics/Gravity/GravityMain
        fields.append('physics_Gravity.grav_boundary_type')
    if 'physics_Gravity_GravityMain_Constant' in schema.model:
        fields += [
            'physics_Gravity_GravityMain_Constant.gconst',
            'physics_Gravity_GravityMain_Constant.gdirec',
        ]
    return PKDict(basic=fields)
def import_file(req, unit_test_mode=False, **kwargs):
    """Parse an uploaded OPAL input file into simulation data.

    Args:
        req: import request; req.file_stream holds the uploaded bytes
        unit_test_mode (bool): accepted for interface compatibility; unused here
    Returns:
        PKDict: parsed simulation data, or an error PKDict listing data files
            referenced by the input that are not yet in the lib
    """
    from sirepo.template import opal_parser
    data, input_files = opal_parser.parse_file(
        pkcompat.from_bytes(req.file_stream.read()),
        filename=req.filename,
    )
    # any referenced data file not already uploaded must be supplied by the user
    missing_files = [
        f for f in input_files if not _SIM_DATA.lib_file_exists(f.lib_filename)
    ]
    if missing_files:
        return PKDict(
            error='Missing data files',
            missingFiles=missing_files,
        )
    return data
async def _cancel(self, error=False):
    """Post a run-cancel request for this job.

    Args:
        error (bool): when True, just await the cancel reply; otherwise
            wrap the await in the waiting-on-status context manager.
    """
    # build the cancel request; self passed as the third arg per client.post's API
    c = self._app.client.post(
        '/run-cancel',
        PKDict(
            report=self._report,
            simulationId=self._sid,
            simulationType=self._app.sim_type,
        ),
        self,
    )
    if error:
        await c
        return
    # normal path: track that we are waiting on a status change while cancelling
    with self._set_waiting_on_status(frame_before_caller=True):
        await c
def _build_monitor_to_model_fields(data):
    """Populate the module-level _MONITOR_TO_MODEL_FIELDS map.

    Maps monitor setting names (bpmN_hpos/vpos, correctorN_HCurrent/VCurrent)
    to the lattice element name and model field they control. No-op when the
    map is already populated.
    """
    if _MONITOR_TO_MODEL_FIELDS:
        return
    watch_count = 0
    kicker_count = 0
    # iterate elements directly instead of the original range(len(...)) index loop
    for el in data.models.elements:
        if el.type == 'WATCH':
            watch_count += 1
            for setting in ('hpos', 'vpos'):
                _MONITOR_TO_MODEL_FIELDS['bpm{}_{}'.format(watch_count, setting)] = PKDict(
                    element=el.name,
                    setting=setting,
                )
        elif el.type == 'KICKER':
            kicker_count += 1
            for setting in ('hkick', 'vkick'):
                _MONITOR_TO_MODEL_FIELDS['corrector{}_{}'.format(
                    kicker_count,
                    'HCurrent' if setting == 'hkick' else 'VCurrent',
                )] = PKDict(element=el.name, setting=setting)
def __fixup_old_data_by_template(cls, data):
    """Run ``sirepo srw fixup_old_data`` in a subprocess and replace ``data``
    in place with the fixed-up result.

    Raises:
        subprocess.CalledProcessError: when the fixup command fails
        Exception: when the command output is not parseable json
    """
    import pykern.pkcompat
    import pykern.pkjson
    import sirepo.simulation_db
    import subprocess
    import os
    import sys
    with sirepo.simulation_db.tmp_dir() as d:
        f = d.join('in.json')
        pykern.pkjson.dump_pretty(data, filename=f, pretty=False)
        try:
            #TODO(robnagler) find a better way to do this
            # subprocess inherits our environment plus the srdb root
            e = PKDict(os.environ).pkupdate(
                SIREPO_SRDB_ROOT=str(sirepo.srdb.root()),
            )
            # NOTE: ``d`` is rebound here from the tmp dir path to the
            # configured tmp_dir setting
            d = sirepo.simulation_db.cfg.tmp_dir
            if d:
                e.SIREPO_SIMULATION_DB_TMP_DIR = str(d)
                e.SIREPO_SIM_DATA_LIB_FILE_RESOURCE_ONLY = '1'
            else:
                e.SIREPO_AUTH_LOGGED_IN_USER = str(sirepo.auth.logged_in_user())
            n = subprocess.check_output(
                ['sirepo', 'srw', 'fixup_old_data', str(f)],
                stderr=subprocess.STDOUT,
                env=e,
            )
        except subprocess.CalledProcessError as e:
            pkdlog('sirepo.pkcli.srw.fixup_old_data failed: {}', e.output)
            raise
        # replace caller's dict contents with the fixed-up data
        data.clear()
        try:
            data.update(pykern.pkjson.load_any(n))
        except Exception as e:
            pkdlog('unable to parse json={}', n)
            raise
def init(**imports):
    """Initialize module globals: MIME type map and js-reload route set."""
    global MIME_TYPE, _RELOAD_JS_ROUTES
    sirepo.util.setattr_imports(imports)
    MIME_TYPE = PKDict(
        html='text/html',
        ipynb='application/x-ipynb+json',
        js='application/javascript',
        json='application/json',
        py='text/x-python',
        madx='text/plain',
    )
    schema = simulation_db.get_schema(sim_type=None)
    # routes flagged requireReload force a full js reload on redirect
    _RELOAD_JS_ROUTES = frozenset(
        k for k, v in schema.localRoutes.items() if v.get('requireReload')
    )
def _get_external_lattice(simulation_id):
    """Load a MAD-X simulation and prepare it as the controls external lattice."""
    lattice = sirepo.simulation_db.read_json(
        _SIM_DATA.controls_madx_dir().join(
            simulation_id,
            sirepo.simulation_db.SIMULATION_DATA_FILE,
        ),
    )
    # strip models/commands the controls app does not use, then normalize
    _delete_unused_madx_models(lattice)
    _delete_unused_madx_commands(lattice)
    sirepo.template.madx.uniquify_elements(lattice)
    _add_monitor(lattice)
    sirepo.template.madx.eval_code_var(lattice)
    return PKDict(
        externalLattice=lattice,
        optimizerSettings=_SIM_DATA.default_optimizer_settings(lattice.models),
    )
def _compute_range_across_frames(run_dir, data):
    """Compute [min, max] per plot field across all frames of the fai data file.

    Returns:
        PKDict: field name -> [min, max], scaled by the per-field factor;
            initial-phase fields alias the same range list as their field.
    """
    res = PKDict()
    # seed every known plot field with an empty range
    for v in _SCHEMA.enum.PhaseSpaceCoordinate:
        res[v[0]] = []
    for v in _SCHEMA.enum.EnergyPlotVariable:
        res[v[0]] = []
    col_names, rows = _read_data_file(
        py.path.local(run_dir).join(_ZGOUBI_FAI_DATA_FILE))
    for field in res:
        values = column_data(field, col_names, rows)
        initial_field = _initial_phase_field(field)
        if initial_field in col_names:
            # include initial-phase values so the range covers both
            # NOTE(review): assumes column_data returns a list, so += extends
            # rather than element-wise adds — confirm
            values += column_data(initial_field, col_names, rows)
        if len(res[field]):
            # widen an existing range (only reachable if a field repeats)
            res[field][0] = min(min(values), res[field][0])
            res[field][1] = max(max(values), res[field][1])
        else:
            res[field] = [min(values), max(values)]
    # scale each range and alias it for the initial-phase field name;
    # list() copy because new keys are added while iterating
    for field in list(res.keys()):
        factor = _ANIMATION_FIELD_INFO[field][1]
        res[field][0] *= factor
        res[field][1] *= factor
        res[_initial_phase_field(field)] = res[field]
    return res
def init_apis(*args, **kwargs):
    """Initialize jupyterhub config, user-db model, and event handlers."""
    global cfg
    cfg = pkconfig.init(
        user_db_root_d=(
            pkio.py_path(sirepo.srdb.root()).join('jupyterhub', 'user'),
            pkio.py_path,
            'Jupyterhub user db',
        ),
        rs_jupyter_migrate=(
            False,
            bool,
            'give user option to migrate data from jupyter.radiasoft.org'),
        uri_root=('jupyter', str, 'the root uri of jupyterhub'),
    )
    pkio.mkdir_parent(cfg.user_db_root_d)
    sirepo.auth_db.init_model(_init_model)
    sirepo.events.register(
        PKDict(
            auth_logout=_event_auth_logout,
            end_api_call=_event_end_api_call,
        ))
    # migration handler only needed when the migrate option is enabled
    if cfg.rs_jupyter_migrate:
        sirepo.events.register(
            PKDict(github_authorized=_event_github_authorized,
            ))
def __user_name():
    """Choose a JupyterHub user name, preferring the GitHub handle when usable."""
    if github_handle:
        # handle is unusable if already registered or its dir was never created
        taken = JupyterhubUser.search_by(user_name=github_handle)
        if taken or not _user_dir(user_name=github_handle).exists():
            raise sirepo.util.SRException(
                'jupyterNameConflict',
                PKDict(sim_type='jupyterhublogin'),
            )
        return github_handle
    name = __handle_or_name_sanitized()
    if JupyterhubUser.search_by(user_name=name):
        # The username already exists. Add some randomness to try and create
        # a unique user name.
        name += _HUB_USER_SEP + sirepo.util.random_base62(3).lower()
    return name
def login_success_response(sim_type, want_redirect=False):
    """Finish login and reply to the client; always raises a reply exception.

    Completes guest registration when pending, then raises either a Redirect
    (possibly to completeRegistration) or a json authState Response.
    """
    r = None
    # guests auto-complete registration on login
    if (
        cookie.get_value(_COOKIE_STATE) == _STATE_COMPLETE_REGISTRATION
        and cookie.get_value(_COOKIE_METHOD) == METHOD_GUEST
    ):
        complete_registration()
    if want_redirect:
        # re-read the cookie: complete_registration() may have changed state
        r = 'completeRegistration' if (
            cookie.get_value(_COOKIE_STATE) == _STATE_COMPLETE_REGISTRATION
        ) else None
        raise sirepo.util.Redirect(sirepo.uri.local_route(sim_type, route_name=r))
    raise sirepo.util.Response(
        response=http_reply.gen_json_ok(PKDict(authState=_auth_state())),
    )
def _create_supervisor_state_file(run_dir):
    """Create a supervisor db record for a pre-supervisor run directory.

    Skips runs whose input json is missing, or whose final status is not
    completed/canceled.
    """
    try:
        i, t = _load_in_json(run_dir)
    except Exception as e:
        # no input json means there is nothing to migrate
        if pkio.exception_is_not_found(e):
            return
        raise
    u = sirepo.simulation_db.uid_from_dir_name(run_dir)
    # sim_data calls below require a logged-in user context
    sirepo.auth.cfg.logged_in_user = u
    c = sirepo.sim_data.get_class(i.simulationType)
    d = PKDict(
        computeJid=c.parse_jid(i, u),
        computeJobHash=c.compute_job_hash(
            i),  # TODO(e-carlin): Another user cookie problem
        computeJobSerial=t,
        computeJobStart=t,
        computeModel=c.compute_model(i),
        error=None,
        history=[],
        isParallel=c.is_parallel(i),
        simulationId=i.simulationId,
        simulationType=i.simulationType,
        uid=u,
    )
    d.pkupdate(
        jobRunMode=sirepo.job.PARALLEL if d.isParallel else sirepo.job.SEQUENTIAL,
        nextRequestSeconds=c.poll_seconds(i),
    )
    _add_compute_status(run_dir, d)
    # only persist terminal runs
    if d.status not in (sirepo.job.COMPLETED, sirepo.job.CANCELED):
        return
    if d.isParallel:
        _add_parallel_status(i, c, run_dir, d)
    sirepo.util.json_dump(d, path=_db_file(d.computeJid))
def _sdds_column(field, sdds_index=0):
    """Read one column from the open SDDS dataset.

    Returns:
        PKDict: sanitized values, all column names, the column definition,
            and err=None
    """
    names = sdds.sddsdata.GetColumnNames(sdds_index)
    assert field in names, 'field not in sdds columns: {}: {}'.format(
        field, names)
    definition = sdds.sddsdata.GetColumnDefinition(sdds_index, field)
    raw = sdds.sddsdata.GetColumn(sdds_index, names.index(field))
    return PKDict(
        values=[_safe_sdds_value(v) for v in raw],
        column_names=names,
        column_def=definition,
        err=None,
    )
def parse_file(lattice_text, maxId=0):
    """Parse lattice source text into models.

    Returns:
        PKDict: beamlines, elements, default_beamline_name, and rpnVariables
            (as a list of name/value PKDicts)
    """
    parser = LineParser(maxId)
    models = PKDict(
        beamlines=[],
        elements=[],
        default_beamline_name=None,
        rpnVariables=PKDict(),
    )
    continuation = ''
    for line in lattice_text.replace('\r', '').split('\n'):
        parser.increment_line_number()
        # skip comment lines
        if re.search(r'^\s*\!', line):
            continue
        # a trailing '&' continues the statement on the next line
        if re.search(r'\&\s*$', line):
            continuation += re.sub(r'(\s*\&\s*)$', '', line)
            continue
        if not _parse_line(parser, continuation + line, models):
            break
        continuation = ''
    # flatten variable dict into the list form the caller expects
    models['rpnVariables'] = [
        PKDict(name=k, value=v) for k, v in models.rpnVariables.items()
    ]
    return models
def background_percent_complete(report, run_dir, is_running):
    """Report progress for an elegant background run."""
    #TODO(robnagler) remove duplication in run_dir.exists() (outer level?)
    alert, last_element, step = _parse_elegant_log(run_dir)
    status = PKDict(
        percentComplete=100,
        frameCount=0,
        alert=alert,
    )
    if is_running:
        # estimate progress from the last element reached in the log
        data = simulation_db.read_json(
            run_dir.join(template_common.INPUT_BASE_NAME))
        status.percentComplete = _compute_percent_complete(
            data, last_element, step)
        return status
    if not run_dir.join(_ELEGANT_SEMAPHORE_FILE).exists():
        # run never completed; nothing to report beyond the alert
        return status
    info = _output_info(run_dir)
    return PKDict(
        percentComplete=100,
        frameCount=1,
        outputInfo=info,
        lastUpdateTime=info[0]['lastUpdateTime'],
        alert=alert,
    )
def _gen_exception_reply_SRException(args):
    """Reply to an SRException: json state for POSTs, redirect otherwise.

    Args:
        args (PKDict): routeName and params from the raised SRException
    Returns:
        flask response: gen_json for non-reload POSTs, else a local-route redirect
    """
    r = args.routeName
    p = args.params or PKDict()
    try:
        t = sirepo.http_request.sim_type(p.pkdel('sim_type'))
        s = simulation_db.get_schema(sim_type=t)
    except Exception as e:
        pkdc('exception={} stack={}', e, pkdexc())
        # sim_type is bad so don't cascade errors, just
        # try to get the schema without the type
        t = None
        s = simulation_db.get_schema(sim_type=None)
    # If default route or always redirect/reload
    if r:
        assert r in s.localRoutes, \
            'route={} not found in schema for type={}'.format(r, t)
    else:
        r = sirepo.uri.default_local_route_name(s)
        p = PKDict(reload_js=True)
    if (
        # must be first, to always delete reload_js
        not p.pkdel('reload_js')
        and flask.request.method == 'POST'
        and r not in _RELOAD_JS_ROUTES):
        pkdc('POST response={} route={} params={}', SR_EXCEPTION_STATE, r, p)
        return gen_json(
            PKDict({
                _STATE: SR_EXCEPTION_STATE,
                SR_EXCEPTION_STATE: args,
            }),
        )
    pkdc('redirect to route={} params={} type={}', r, p, t)
    return gen_redirect_for_local_route(
        t,
        route=r,
        params=p,
        sr_exception=pkjson.dump_pretty(args, pretty=False),
    )
def sr_sim_data(self, sim_name=None, sim_type=None):
    """Return simulation data by name

    Args:
        sim_name (str): case sensitive name ['Scooby Doo']
        sim_type (str): app ['myapp']

    Returns:
        dict: data
    """
    from pykern import pkunit
    from pykern.pkdebug import pkdpretty

    self.sr_sim_type_set(sim_type)
    sim_name = sim_name or 'Scooby Doo'
    matches = self.sr_post(
        'listSimulations',
        PKDict(
            simulationType=self.sr_sim_type,
            search=PKDict({'simulation.name': sim_name}),
        ),
    )
    assert 1 == len(matches), \
        'listSimulations name={} returned count={}'.format(sim_name, len(matches))
    sim = matches[0].simulation
    res = self.sr_get_json(
        'simulationData',
        PKDict(
            simulation_type=self.sr_sim_type,
            pretty='0',
            simulation_id=sim.simulationId,
        ),
    )
    pkunit.pkeq(sim_name, res.models.simulation.name)
    return res
def __crl_to_shadow(self, item):
    """Convert an SRW CRL beamline item to its shadow equivalent."""
    p = float(item.position)
    f = float(item.focalDistance)
    return self.__copy_item(
        item,
        PKDict(
            type='crl',
            attenuationCoefficient=1e-2 / float(item.attenuationLength),
            fcyl='0',
            fhit_c='1',
            fmirr='4' if item.shape == '1' else '1',
            # thin-lens imaging distance; NOTE(review): divides by (p - f),
            # so assumes position != focalDistance — confirm upstream validation
            focalDistance=p * f / (p - f),
            pilingThickness=0,
            refractionIndex=1 - float(item.refractiveIndex),
        ),
    )
def sr_animation_run(self, sim_name, compute_model, reports=None, **kwargs):
    """Run a simulation and verify its animation frame reports.

    Args:
        sim_name (str): case sensitive simulation name
        compute_model (str): model to run
        reports (dict): frameReport name -> options PKDict; keys named
            ``expect_<field>`` are regexes (or callables taking a frame index)
            matched against str(reply[field]). Defaults to no report checks.
    """
    from pykern import pkunit
    from pykern.pkcollections import PKDict
    from pykern.pkdebug import pkdp, pkdlog
    import re

    def _check_expects(opts, reply, frame_index):
        # match every expect_<field> option against the reply's <field>
        for k, v in opts.items():
            m = re.search('^expect_(.+)', k)
            if m:
                pkunit.pkre(
                    v(frame_index) if callable(v) else v,
                    str(reply.get(m.group(1))),
                )

    if reports is None:
        # the original default crashed on reports.items(); treat None as empty
        reports = PKDict()
    data = self.sr_sim_data(sim_name)
    run = self.sr_run_sim(data, compute_model, **kwargs)
    for r, a in reports.items():
        if 'runSimulation' in a:
            f = self.sr_run_sim(data, r)
            # NOTE(review): the original referenced the frame-loop variable
            # ``i`` here, which is unbound (or stale) at this point; there is
            # no frame index for a runSimulation report, so pass None
            _check_expects(a, f, None)
            continue
        if 'frame_index' in a:
            c = [a.get('frame_index')]
        else:
            c = range(run.get(a.get('frame_count_key', 'frameCount')))
        assert c, \
            'frame_count_key={} or frameCount={} is zero'.format(
                a.get('frame_count_key'), a.get('frameCount'),
            )
        pkdlog('frameReport={} count={}', r, c)
        import sirepo.sim_data
        s = sirepo.sim_data.get_class(self.sr_sim_type)
        for i in c:
            pkdlog('frameIndex={} frameCount={}', i, run.get('frameCount'))
            f = self.sr_get_json(
                'simulationFrame',
                PKDict(frame_id=s.frame_id(data, run, r, i)),
            )
            _check_expects(a, f, i)
def get_schema(sim_type):
    """Get the schema for `sim_type`

    If sim_type is None, it will return the schema for the first sim_type
    in `feature_config.cfg().sim_types`

    Args:
        sim_type (str): must be valid
    Returns:
        dict: Shared schema
    """
    t = sirepo.template.assert_sim_type(sim_type) if sim_type is not None \
        else list(feature_config.cfg().sim_types)[0]
    if t in _SCHEMA_CACHE:
        return _SCHEMA_CACHE[t]
    schema = read_json(
        STATIC_FOLDER.join('json/{}-schema'.format(t)))
    schema.update(SCHEMA_COMMON)
    # computed once; the original set this twice (update() kwarg then
    # attribute assignment — the same PKDict key)
    schema.feature_config = feature_config.for_sim_type(t)
    schema.simulationType = t
    _SCHEMA_CACHE[t] = schema
    #TODO(mvk): improve merging common and local schema
    _merge_dicts(schema.common.dynamicFiles, schema.dynamicFiles)
    schema.dynamicModules = _files_in_schema(schema.dynamicFiles)
    for item in [
        'appDefaults', 'appModes', 'constants', 'cookies', 'enum',
        'notifications', 'localRoutes', 'model', 'strings', 'view',
    ]:
        if item not in schema:
            schema[item] = PKDict()
        _merge_dicts(schema.common[item], schema[item])
        _merge_subclasses(schema, item)
    srschema.validate(schema)
    return schema
def extract_particle_report(frame_args, particle_type):
    """Build a 2d weighted histogram of two particle coordinates for one frame.

    Args:
        frame_args (PKDict): run_dir, frameIndex, x, y, histogramBins and
            optional particle-selection fields
        particle_type (str): openPMD species name
    Returns:
        PKDict: heatmap data (ranges, labels, z_matrix, title, frameCount)
    """
    data_file = open_data_file(frame_args.run_dir, frame_args.frameIndex)
    xarg = frame_args.x
    yarg = frame_args.y
    nbins = frame_args.histogramBins
    opmd = _opmd_time_series(data_file)
    # data_list: [x values, y values] for the selected iteration
    data_list = opmd.get_particle(
        var_list=[xarg, yarg],
        species=particle_type,
        iteration=numpy.array([data_file.iteration]),
        select=None,
        output=True,
        plot=False,
    )
    # append particle weights ('w') as the third entry, used below as histogram weights
    with h5py.File(data_file.filename) as f:
        data_list.append(main.read_species_data(f, particle_type, 'w', ()))
    select = _particle_selection_args(frame_args)
    if select:
        # filter all three arrays in place by the selection criteria
        with h5py.File(data_file.filename) as f:
            main.apply_selection(f, data_list, select, particle_type, ())
    # single-character args (x, y, z) are positions in meters; others unitless
    xunits = ' [m]' if len(xarg) == 1 else ''
    yunits = ' [m]' if len(yarg) == 1 else ''
    if xarg == 'z':
        data_list = _adjust_z_width(data_list, data_file)
    hist, edges = numpy.histogramdd(
        [data_list[0], data_list[1]],
        template_common.histogram_bins(nbins),
        weights=data_list[2],
        range=[
            _select_range(data_list[0], xarg, select),
            _select_range(data_list[1], yarg, select)
        ],
    )
    return PKDict(
        x_range=[float(edges[0][0]), float(edges[0][-1]), len(hist)],
        y_range=[float(edges[1][0]), float(edges[1][-1]), len(hist[0])],
        x_label='{}{}'.format(xarg, xunits),
        y_label='{}{}'.format(yarg, yunits),
        title='t = {}'.format(_iteration_title(opmd, data_file)),
        # transpose so rows correspond to y, as the heatmap client expects
        z_matrix=hist.T.tolist(),
        frameCount=data_file.num_frames,
    )
def test_myapp_free_user_sim_purged(auth_fc):
    """Free users' completed sims are purged after the retention window;
    premium users' sims are kept."""
    from pykern import pkio
    from pykern import pkunit
    from pykern.pkdebug import pkdp
    import sirepo.auth

    def _check_run_dir(should_exist=0):
        # count report files under the current user's dir
        f = pkio.walk_tree(fc.sr_user_dir(), file_re=m)
        pkunit.pkeq(should_exist, len(f), 'incorrect file count')

    def _make_user_premium(uid):
        sirepo.auth_db.UserRole.add_roles(uid, [sirepo.auth.ROLE_PREMIUM])
        r = sirepo.auth_db.UserRole.search_all_for_column('uid')
        pkunit.pkeq(r, [uid], 'expecting one premium user with same id')

    def _run_sim(data):
        # run the report and build the request dict used for later runStatus polls
        r = fc.sr_run_sim(data, m)
        r.simulationType = fc.sr_sim_type
        r.report = m
        r.update(data)
        return r

    def _status_eq(next_req, status):
        pkunit.pkeq(
            status,
            fc.sr_post('runStatus', next_req).state
        )

    fc = auth_fc
    m = 'heightWeightReport'
    user_free = '[email protected]'
    user_premium = '[email protected]'
    fc.sr_email_register(user_free)
    fc.sr_email_register(user_premium)
    # second registration leaves user_premium logged in
    _make_user_premium(fc.sr_auth_state().uid)
    next_req_premium = _run_sim(fc.sr_sim_data())
    fc.sr_email_login(user_free)
    next_req_free = _run_sim(fc.sr_sim_data())
    # advance server time past the free-user retention window
    fc.sr_get_json(
        'adjustTime',
        params=PKDict(days=_PURGE_FREE_AFTER_DAYS + 1),
    )
    # wait for the purge background task to run
    time.sleep(_CACHE_AND_SIM_PURGE_PERIOD + 1)
    _status_eq(next_req_free, 'job_run_purged')
    _check_run_dir(should_exist=0)
    fc.sr_email_login(user_premium)
    _status_eq(next_req_premium, 'completed')
    _check_run_dir(should_exist=7)
def _apply_clone(g_id, xform):
    """Apply a clone transform (translations/rotations, optionally with
    alternating fields) to a Radia geometry, producing numCopies copies."""
    # start with 'identity'
    combined = radia.TrfTrsl([0, 0, 0])
    for t in xform.transforms:
        t = PKDict(t)
        if t.model == 'translateClone':
            combined = radia.TrfCmbL(
                combined,
                radia.TrfTrsl(_split_comma_field(t.distance, 'float')),
            )
        elif t.model == 'rotateClone':
            combined = radia.TrfCmbL(
                combined,
                radia.TrfRot(
                    _split_comma_field(t.center, 'float'),
                    _split_comma_field(t.axis, 'float'),
                    numpy.pi * float(t.angle) / 180.,
                ),
            )
    if xform.alternateFields != '0':
        # flip the field direction on each successive copy
        combined = radia.TrfCmbL(combined, radia.TrfInv())
    radia.TrfMlt(g_id, combined, xform.numCopies + 1)
def test_warppba_login(new_user_fc):
    """An sbatch run fails with no-creds until sbatchLogin succeeds."""
    from pykern.pkunit import pkexcept
    c, d = _warppba_login_setup(new_user_fc)
    # without credentials the run must raise no-creds
    with pkexcept('SRException.*no-creds'):
        new_user_fc.sr_run_sim(d, c, expect_completed=False)
    # supply (masked) credentials, then the run may proceed
    new_user_fc.sr_post(
        'sbatchLogin',
        PKDict(
            password='******',
            report=c,
            simulationId=d.models.simulation.simulationId,
            simulationType=d.simulationType,
            username='******',
        ))
    new_user_fc.sr_run_sim(d, c, expect_completed=False)
def __beamline_to_shadow(self, srw, shadow):
    """Copy supported SRW beamline items into the shadow simulation.

    Handles watch, aperture/obstacle and ellipsoidMirror items; others are
    skipped. Tracks accumulated mirror rotation across the beamline.
    """
    current_rotation = 0
    for item in srw.beamline:
        #TODO(pjm): implement more beamline elements
        if item.type == 'watch':
            shadow.beamline.append(self.__copy_item(item, item))
            # carry over the report's color map for the matching watchpoint
            watch_name = f'watchpointReport{item.id}'
            shadow[watch_name] = PKDict(
                colorMap=srw[watch_name].colorMap,
            )
        elif item.type in ('aperture', 'obstacle'):
            ap = self.__copy_item(item, item)
            # srw shape 'r' (rectangle) -> shadow '0', otherwise '1'
            ap.shape = '0' if ap.shape == 'r' else '1'
            shadow.beamline.append(ap)
        elif item.type == 'ellipsoidMirror':
            # mirror conversion returns its rotation contribution
            r = self.__mirror_to_shadow(item, current_rotation, shadow)
            current_rotation = (current_rotation + r) % 360
def login_fail_redirect(sim_type=None, module=None, reason=None, reload_js=False):
    """Always raises util.SRException routing the client to the loginFail page."""
    params = PKDict(
        method=module.AUTH_METHOD,
        reason=reason,
        reload_js=reload_js,
        sim_type=sim_type,
    )
    raise util.SRException(
        'loginFail',
        params,
        'login failed: reason={} method={}',
        reason,
        module.AUTH_METHOD,
    )
def agent_env(env=None, uid=None):
    """Build the shell 'export' statements for a job agent's environment.

    Args:
        env (PKDict): base environment; values already present win
        uid (str): user id; falls back to the logged-in user
    Returns:
        str: newline-joined export statements
    """
    merged = (env or PKDict()).pksetdefault(
        **pkconfig.to_environ((
            'pykern.*',
            'sirepo.feature_config.job',
        ))
    ).pksetdefault(
        PYTHONPATH='',
        PYTHONSTARTUP='',
        PYTHONUNBUFFERED='1',
        SIREPO_AUTH_LOGGED_IN_USER=lambda: uid or sirepo.auth.logged_in_user(),
        SIREPO_JOB_VERIFY_TLS=cfg.verify_tls,
        SIREPO_JOB_MAX_MESSAGE_SIZE=cfg.max_message_size,
        SIREPO_JOB_PING_INTERVAL_SECS=cfg.ping_interval_secs,
        SIREPO_JOB_PING_TIMEOUT_SECS=cfg.ping_timeout_secs,
        SIREPO_SRDB_ROOT=lambda: sirepo.srdb.root(),
    )
    exports = ["export {}='{}'".format(k, v) for k, v in merged.items()]
    return '\n'.join(exports)
async def job_cmd_reply(self, msg, op_name, text):
    """Parse job_cmd output and forward the reply to the supervisor.

    Args:
        msg: the originating op message
        op_name (str): op to reply with; replaced with job.OP_ERROR when
            ``text`` is not valid json
        text (str): raw job_cmd output
    """
    try:
        r = pkjson.load_any(text)
    except Exception:
        op_name = job.OP_ERROR
        r = PKDict(
            state=job.ERROR,
            # plain literal: the original used an f-string with no placeholders
            error='unable to parse job_cmd output',
            stdout=text,
        )
    try:
        await self.send(self.format_op(msg, op_name, reply=r))
    except Exception as e:
        pkdlog('reply={} error={} stack={}', r, e, pkdexc())
        # something is really wrong, because format_op is messed up
        raise