def _add_particle_paths(electrons, x_points, y_points, z_points, half_height, limit):
    # adds paths for the particleAnimation report
    # culls adjacent path points with similar slope
    count = 0
    cull_count = 0
    for i in range(min(len(electrons[1]), limit)):
        res = {'x': [], 'y': [], 'z': []}
        num_points = len(electrons[1][i])
        prev = [None, None, None]
        for j in range(num_points):
            curr = [
                electrons[1][i][j],
                electrons[0][i][j],
                electrons[2][i][j],
            ]
            if j > 0 and j < num_points - 1:
                next = [
                    electrons[1][i][j + 1],
                    electrons[0][i][j + 1],
                    electrons[2][i][j + 1],
                ]
                if _cull_particle_point(curr, next, prev):
                    cull_count += 1
                    continue
            res['x'].append(curr[0])
            res['y'].append(curr[1])
            res['z'].append(curr[2])
            prev = curr
        count += len(res['x'])
        x_points.append(res['x'])
        y_points.append(res['y'])
        z_points.append(res['z'])
    pkdc('particles: {} paths, {} points {} points culled', len(x_points), count, cull_count)

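# Hedged sketch of the _cull_particle_point helper referenced above (not shown
# in this collection): it is assumed to drop a point when the segments before
# and after it point in nearly the same direction, so removing the point
# barely changes the rendered path. The tolerance name/default are guesses.
def _cull_particle_point(curr, next, prev, tolerance=1e-3):
    if prev[0] is None:
        return False
    d1 = [c - p for c, p in zip(curr, prev)]
    d2 = [n - c for n, c in zip(next, curr)]
    n1 = sum(x * x for x in d1) ** 0.5
    n2 = sum(x * x for x in d2) ** 0.5
    if n1 == 0 or n2 == 0:
        # zero-length segment adds nothing to the path
        return True
    # cosine of the angle between adjacent segments; cull if nearly parallel
    cos = sum(a * b for a, b in zip(d1, d2)) / (n1 * n2)
    return cos > 1 - tolerance
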
def _init_uris(app, simulation_db):
    global _default_route, _empty_route, srunit_uri, _api_to_route, _uri_to_route

    _uri_to_route = pkcollections.Dict()
    _api_to_route = pkcollections.Dict()
    for k, v in simulation_db.SCHEMA_COMMON.route.items():
        r = _split_uri(v)
        try:
            r.func = _api_funcs[_FUNC_PREFIX + k]
        except KeyError:
            pkdc('not adding api, because module not registered: uri={}', v)
            continue
        api_auth.assert_api_def(r.func)
        r.decl_uri = v
        r.name = k
        assert not r.base_uri in _uri_to_route, \
            '{}: duplicate end point; other={}'.format(v, _uri_to_route[r.base_uri])
        _uri_to_route[r.base_uri] = r
        _api_to_route[k] = r
        if r.base_uri == '':
            _default_route = r
        if 'srunit' in v:
            srunit_uri = v
    assert _default_route, \
        'missing default route'
    # 'light' is the homePage, not 'root'
    _empty_route = _uri_to_route.light
    app.add_url_rule('/<path:path>', '_dispatch', _dispatch, methods=('GET', 'POST'))
    app.add_url_rule('/', '_dispatch_empty', _dispatch_empty, methods=('GET', 'POST'))

def __init__(self, data):
    with self._lock:
        self.jid = simulation_db.job_id(data)
        pkdc('{}: created', self.jid)
        if self.jid in self._job:
            pkdlog(
                '{}: Collision tid={} celery_state={}',
                self.jid,
                self.async_result,
                self.async_result and self.async_result.state,
            )
            raise Collision(self.jid)
        self.cmd, self.run_dir = simulation_db.prepare_simulation(data)
        self.data = data
        self._job[self.jid] = self
        self.async_result = self._start_job()
        pkdc(
            '{}: started tid={} dir={} queue={} len_jobs={}',
            self.jid,
            self.async_result.task_id,
            self.run_dir,
            self.celery_queue,
            len(self._job),
        )

async def _agent_ready(self, op):
    if self._websocket_ready.is_set():
        return
    await self._agent_start(op)
    pkdlog('{} {} await _websocket_ready', self, op)
    await self._websocket_ready.wait()
    pkdc('{} websocket alive', op)
    raise job_supervisor.Awaited()

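# Hedged sketch of how a caller is assumed to drive _agent_ready: Awaited
# signals "progress was made by waiting; re-run the operation from the top".
# _run_op and _do_op are illustrative names, not the actual supervisor code.
async def _run_op(self, op):
    while True:
        try:
            await self._agent_ready(op)
            return await self._do_op(op)
        except job_supervisor.Awaited:
            # state changed while awaiting; retry with fresh state
            continue
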
def main():
    """test bending magnet and plot results"""
    res = test_bending_magnet_infrared()
    pkdc('Calling plots with array shape: {}...', res.intensity.shape)
    plt.pcolormesh(res.dim_x, res.dim_y, res.intensity.transpose())
    plt.title('Real space for infrared example')
    plt.colorbar()
    plt.show()

def _register_sim_api_modules():
    for _, n, ispkg in pkgutil.iter_modules(
        [os.path.dirname(sirepo.sim_api.__file__)],
    ):
        if ispkg:
            continue
        if not sirepo.template.is_sim_type(n):
            pkdc(f'not adding apis for unknown sim_type={n}')
            continue
        register_api_module(importlib.import_module(f'sirepo.sim_api.{n}'))

def _gen_exception_reply(exc):
    f = getattr(
        pykern.pkinspect.this_module(),
        '_gen_exception_reply_' + exc.__class__.__name__,
        None,
    )
    pkdc('exception={} sr_args={}', exc, exc.sr_args)
    if not f:
        return _gen_exception_error(exc)
    return f(exc.sr_args)

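# Hedged example of the naming convention _gen_exception_reply dispatches on:
# a module-level function named '_gen_exception_reply_' + ExceptionClassName
# receives the exception's sr_args. The class name (NotFound) and the reply
# payload below are hypothetical, not the actual handlers in this module.
def _gen_exception_reply_NotFound(args):
    # args is the sr_args object attached to the exception by its raiser
    return PKDict(error='not found', request=args)
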
def request(self, method, path, headers=None, *a, **kw):
    url = self._url(path)
    pkdebug.pkdc('{} {}', method, url)
    return self.agent.request(
        headers=twisted.web.http_headers.Headers(headers or {}),
        method=method.encode(),
        uri=url.encode(),
        *a,
        **kw,
    ).addCallback(self._log_response, method, url)

async def _start_report_job(job_tracker, request):
    pkdc('start_report_job: {}', request)
    await job_tracker.start_report_job(
        request.run_dir,
        request.jhash,
        request.backend,
        request.cmd,
        pkio.py_path(request.tmp_dir),
    )
    return {}

async def _read_stream(self):
    t = await self._stream.read_bytes(
        self._MAX - len(self.text),
        partial=True,
    )
    pkdc('stderr={}', t)
    l = len(self.text) + len(t)
    assert l < self._MAX, \
        'len(bytes)={} greater than _MAX={}'.format(l, self._MAX)
    self.text.extend(t)

def _run(cmd):
    try:
        pkdc('{}', ' '.join(cmd))
        return pkcompat.from_bytes(
            subprocess.check_output(cmd, stderr=subprocess.STDOUT),
        )
    except Exception as e:
        o = ''
        if hasattr(e, 'output'):
            o = pkcompat.from_bytes(e.output)
        pkdlog('command error: cmd={} error={} out={}', cmd, e, o)
        raise

def test_bending_magnet_infrared():
    """Run the bending magnet example, check results, and plot output"""
    res = run_bending_magnet()
    flux = res.intensity.sum() \
        * (res.dim_x[1] - res.dim_x[0]) * (res.dim_y[1] - res.dim_y[0])
    pkdc('Total flux = {:10.5e} photons/s/.1%bw', flux)
    _assert(2.40966e+08, flux)
    checksum = np.sum(np.abs(res.wavefront.arEx)) \
        + np.sum(np.abs(res.wavefront.arEy))
    pkdc('checksum = {}', checksum)
    _assert(1.845644e10, checksum, 0.1)
    return res

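# Hedged sketch of the _assert helper these SRW tests call but do not show:
# given calls like _assert(1.845644e10, checksum, 0.1), it is assumed to be a
# relative-tolerance comparison; the default tolerance here is a guess.
def _assert(expect, actual, relative_tolerance=0.01):
    assert abs((actual - expect) / expect) <= relative_tolerance, \
        'actual={} expect={} relative_tolerance={}'.format(
            actual, expect, relative_tolerance)
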
def __docker(self, cmd):
    cmd = ['docker'] + cmd
    try:
        pkdc('Running: {}', ' '.join(cmd))
        return subprocess.check_output(
            cmd,
            stdin=open(os.devnull),
            stderr=subprocess.STDOUT,
        ).rstrip()
    except subprocess.CalledProcessError as e:
        pkdlog('{}: failed: exit={} output={}', cmd, e.returncode, e.output)
        return None

def _read_h5_path(sim_id, filename, h5path, run_dir=_GEOM_DIR):
    try:
        with h5py.File(_get_res_file(sim_id, filename, run_dir=run_dir), 'r') as hf:
            return template_common.h5_to_dict(hf, path=h5path)
    except IOError as e:
        if pkio.exception_is_not_found(e):
            pkdc(f'{filename} not found in {run_dir}')
            # need to generate file
            return None
    except KeyError:
        # no such path in file
        pkdc(f'path {h5path} not found in {run_dir}/{filename}')
        return None

def test_pkdc_deviance(capsys):
    """Test max exceptions"""
    import pykern.pkdebug as d

    d.init(control='.')
    for i in range(d.MAX_EXCEPTION_COUNT):
        d.pkdc('missing format value {}')
        out, err = capsys.readouterr()
        assert 'invalid format' in err, \
            'When fmt is incorrect, output indicates format error'
    d.pkdc('any error{}')
    out, err = capsys.readouterr()
    assert '' == err, \
        'When exception_count exceeds MAX_EXCEPTION_COUNT, no output'

def _generate_field_integrals(g_id, f_paths):
    try:
        res = PKDict()
        for p in [fp for fp in f_paths if fp.type == 'line']:
            res[p.name] = PKDict()
            p1 = [float(p.beginX), float(p.beginY), float(p.beginZ)]
            p2 = [float(p.endX), float(p.endY), float(p.endZ)]
            for i_type in radia_tk.INTEGRABLE_FIELD_TYPES:
                res[p.name][i_type] = radia_tk.field_integral(g_id, i_type, p1, p2)
        return res
    except RuntimeError as e:
        pkdc('Radia error {}', str(e))
        return PKDict(error=str(e))

def _generate_data(g_id, in_data, add_lines=True):
    try:
        o = _generate_obj_data(g_id, in_data.name)
        if in_data.viewType == VIEW_TYPE_OBJ:
            return o
        elif in_data.viewType == VIEW_TYPE_FIELD:
            g = _generate_field_data(
                g_id, in_data.name, in_data.fieldType, in_data.get('fieldPaths', None)
            )
            if add_lines:
                _add_obj_lines(g, o)
            return g
    except RuntimeError as e:
        pkdc('Radia error {}', str(e))
        return PKDict(error=str(e))

def test_pkdc(capsys):
    """Verify basic output"""
    # The pkdc statement is four lines forward, hence +4
    this_file = os.path.relpath(__file__)
    control = this_file + ':' + str(inspect.currentframe().f_lineno + 4) + ':test_pkdc t1'
    from pykern import pkdebug
    from pykern.pkdebug import pkdc, init, pkdp
    init(control=control)
    pkdc('t1')
    out, err = capsys.readouterr()
    assert control + '\n' == err, \
        'When control exactly matches file:line:func msg, output is same'
    pkdc('t2')
    out, err = capsys.readouterr()
    assert '' == err, \
        'When pkdc msg does not match control, no output'
    init(control='t3')
    pkdc('t3 {}', 'p3')
    out, err = capsys.readouterr()
    assert 'test_pkdc t3' in err, \
        'When control is simple msg match, expect output'
    assert 't3 p3\n' in err, \
        'When positional format *args, expect positional param in output'
    output = six.StringIO()
    init(control='t4', output=output)
    pkdc('t4 {k4}', k4='v4')
    out, err = capsys.readouterr()
    assert 'test_pkdc t4 v4' in output.getvalue(), \
        'When params is **kwargs, value is formatted from params'
    assert '' == err, \
        'When output is passed to init(), stderr is empty'

def _cmd(host, cmd):
    c = _hosts[host].cmd_prefix + cmd
    try:
        pkdc('Running: {}', ' '.join(c))
        return subprocess.check_output(
            c,
            stdin=open(os.devnull),
            stderr=subprocess.STDOUT,
        ).rstrip()
    except subprocess.CalledProcessError as e:
        if cmd[0] == 'run':
            pkdlog('{}: failed: exit={} output={}', cmd, e.returncode, e.output)
        return None

def host_db(self, channel, host):
    res = Host()
    v = PKDict(
        rsconf_db=PKDict(
            # Common defaults we allow overrides for
            host_run_d='/srv',
            run_u='vagrant',
            root_u='root',
        )
    )
    merge_dict(res, v)
    #TODO(robnagler) optimize by caching default and channels
    for l in LEVELS:
        v = self.base[l]
        if l != LEVELS[0]:
            v = v.get(channel)
            if not v:
                continue
            if l == LEVELS[2]:
                v = v.get(host)
                if not v:
                    continue
        merge_dict(res, v)
    host = host.lower()
    v = PKDict(
        rsconf_db=PKDict(
            channel=channel,
            db_d=self.db_d,
            host=host,
            proprietary_source_d=self.proprietary_source_d,
            rpm_source_d=self.rpm_source_d,
            secret_d=self.secret_d,
            srv_d=self.srv_d,
            srv_host_d=self.srv_host_d,
            tmp_d=self.tmp_d.join(host),
            # https://jnovy.fedorapeople.org/pxz/node1.html
            # compression with 8 threads and max compression
            # Useful (random) constants
            compress_cmd='pxz -T8 -9',
        )
    )
    v.rsconf_db.resource_paths = _init_resource_paths(v)
    v.rsconf_db.local_files = _init_local_files(v)
    pkio.unchecked_remove(v.rsconf_db.tmp_d)
    pkio.mkdir_parent(v.rsconf_db.tmp_d)
    merge_dict(res, v)
    _assert_no_rsconf_db_values(res)
    _update_paths(res)
    pkdc('{}', res)
    return res

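# Simplified, hedged sketch of the merge_dict used above: recursively overlay
# one level's values onto the accumulated result so later levels win. The real
# rsconf merge has more rules (e.g. list and key-prefix handling); this only
# illustrates the layering idea behind host_db.
def merge_dict(base, new):
    for k, v in new.items():
        if isinstance(base.get(k), dict) and isinstance(v, dict):
            # descend so sibling keys at deeper levels are preserved
            merge_dict(base[k], v)
        else:
            base[k] = v
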
def _start(self):
    """Detach a process from the controlling terminal and run it in
    the background as a daemon.
    """
    #POSIT: jid is valid docker name (word chars and dash)
    self.cname = _DOCKER_CONTAINER_PREFIX + self.jid
    ctx = pkcollections.Dict(
        kill_secs=_KILL_TIMEOUT_SECS,
        run_dir=self.run_dir,
        run_log=self.run_dir.join(template_common.RUN_LOG),
        run_secs=self.__run_secs(),
        sh_cmd=self.__sh_cmd(),
    )
    script = str(self.run_dir.join(_DOCKER_CONTAINER_PREFIX + 'run.sh'))
    with open(str(script), 'wb') as f:
        f.write(pkjinja.render_resource('runner/docker.sh', ctx))
    cmd = [
        'run',
        #TODO(robnagler) configurable
        '--cpus=1',
        '--detach',
        '--init',
        '--log-driver=json-file',
        # never should be large, just for output of the monitor
        '--log-opt=max-size=1m',
        '--memory=1g',
        '--name=' + self.cname,
        '--network=none',
        '--rm',
        '--ulimit=core=0',
        #TODO(robnagler) this doesn't do anything
        # '--ulimit=cpu=1',
        '--ulimit=nofile={}'.format(_MAX_OPEN_FILES),
        '--user=' + '******',  # user value redacted in this copy
        'bash',
        script,
    ]
    self.cid = self.__docker(cmd)
    pkdc(
        '{}: started cname={} cid={} dir={} len_jobs={} cmd={}',
        self.jid,
        self.cname,
        self.cid,
        self.run_dir,
        len(_job_map),
        ' '.join(cmd),
    )

def simulate():
    t0_setting = time.time()
    B = 0.4
    LeffBM = 4.0
    BM = srw.SRWLMagFldM(B, 1, "n", LeffBM)
    magFldCnt = srw.SRWLMagFldC(
        [BM],
        pkarray.new_double([0]),
        pkarray.new_double([0]),
        pkarray.new_double([0]),
    )
    eBeam = srw.SRWLPartBeam()
    eBeam.Iavg = 0.5
    eBeam.partStatMom1.x = 0.0
    eBeam.partStatMom1.y = 0.0
    eBeam.partStatMom1.z = 0.0
    eBeam.partStatMom1.xp = 0.0
    eBeam.partStatMom1.yp = 0.0
    eBeam.partStatMom1.gamma = 3.0 / 0.51099890221e-03
    eBeam.arStatMom2[0] = 127.346e-06 ** 2
    eBeam.arStatMom2[1] = -10.85e-09
    eBeam.arStatMom2[2] = 92.3093e-06 ** 2
    eBeam.arStatMom2[3] = 13.4164e-06 ** 2
    eBeam.arStatMom2[4] = 0.0072e-09
    eBeam.arStatMom2[5] = 0.8022e-06 ** 2
    eBeam.arStatMom2[10] = 0.89e-03 ** 2
    wfr = srw.SRWLWfr()
    wfr.allocate(1, 10, 10)
    distSrcLens = 5.0
    wfr.mesh.zStart = distSrcLens
    wfr.mesh.eStart = 0.5 * 0.123984
    wfr.mesh.eFin = wfr.mesh.eStart
    horAng = 0.1
    wfr.mesh.xStart = -0.5 * horAng * distSrcLens
    wfr.mesh.xFin = 0.5 * horAng * distSrcLens
    verAng = 0.02
    wfr.mesh.yStart = -0.5 * verAng * distSrcLens
    wfr.mesh.yFin = 0.5 * verAng * distSrcLens
    wfr.partBeam = eBeam
    distLensImg = distSrcLens
    focLen = wfr.mesh.zStart * distLensImg / (distSrcLens + distLensImg)
    optLens = srw.SRWLOptL(_Fx=focLen, _Fy=focLen)
    optDrift = srw.SRWLOptD(distLensImg)
    propagParLens = [1, 1, 1.0, 0, 0, 1.0, 2.0, 1.0, 2.0, 0, 0, 0]
    propagParDrift = [1, 1, 1.0, 0, 0, 1.0, 1.0, 1.0, 1.0, 0, 0, 0]
    optBL = srw.SRWLOptC([optLens, optDrift], [propagParLens, propagParDrift])
    pkdc("parameters done in {}s", round(time.time() - t0_setting))
    meth = 2
    relPrec = 0.01
    zStartInteg = 0
    zEndInteg = 0
    npTraj = 20000
    useTermin = 1
    sampFactNxNyForProp = 0.7
    arPrecSR = [meth, relPrec, zStartInteg, zEndInteg, npTraj, useTermin, sampFactNxNyForProp]
    pkdc("Performing initial electric field calculation...")
    srw.srwl.CalcElecFieldSR(wfr, 0, magFldCnt, arPrecSR)
    pkdc("Extracting intensity from calculated electric field and saving it to file(s)")
    mesh0 = copy.deepcopy(wfr.mesh)
    arI0 = pkarray.new_float([0] * mesh0.nx * mesh0.ny)
    srw.srwl.CalcIntFromElecField(arI0, wfr, 6, 0, 3, mesh0.eStart, 0, 0)
    pkdc("Simulating single-electron electric field wavefront propagation...")
    srw.srwl.PropagElecField(wfr, optBL)
    return wfr

def _generate_data(g_id, in_data, add_lines=True):
    try:
        o = _generate_obj_data(g_id, in_data.name)
        if in_data.viewType == _SCHEMA.constants.viewTypeObjects:
            return o
        elif in_data.viewType == _SCHEMA.constants.viewTypeFields:
            g = _generate_field_data(
                g_id, in_data.name, in_data.fieldType, in_data.get('fieldPaths', None)
            )
            if add_lines:
                _add_obj_lines(g, o)
            return g
    except RuntimeError as e:
        pkdc('Radia error {}', str(e))
        return PKDict(error=str(e))

def _start(self):
    """Detach a process from the controlling terminal and run it in
    the background as a daemon.
    """
    self.__celery_queue = simulation_db.celery_queue(self.data)
    self.__async_result = celery_tasks.start_simulation.apply_async(
        args=[self.cmd, str(self.run_dir)],
        queue=self.__celery_queue,
    )
    pkdc(
        '{}: started tid={} dir={} queue={}',
        self.jid,
        self.__async_result.task_id,
        self.run_dir,
        self.__celery_queue,
    )

def restore(git_txz):
    """Restores the git directory (only) to a new directory with the
    .git.txz suffix
    """
    m = re.search(r'(([^/]+)\.git)\.txz$', git_txz)
    if not m:
        raise ValueError(git_txz, ': does not end in .git.txz')
    git_txz = pkio.py_path(git_txz)
    d = m.group(2)
    pkdc('restore: {}', d)
    g = m.group(1)
    with pkio.save_chdir(d, mkdir=True):
        _shell(['tar', 'xJf', str(git_txz)])
        os.rename(g, '.git')
        _shell(['git', 'config', 'core.bare', 'false'])
        _shell(['git', 'config', 'core.logallrefupdates', 'true'])
        _shell(['git', 'checkout'])

def fixup_old_data(data, force=False):
    """Upgrade data to latest schema and updates version.

    Args:
        data (dict): to be updated (destructively)
        force (bool): force validation

    Returns:
        dict: upgraded `data`
        bool: True if data changed
    """
    try:
        pkdc(
            "{} force= {}, version= {} (SCHEMA_COMMON.version={})",
            data.get('models', {}).get('simulation', {}).get('simulationId', None),
            force,
            data.get('version', None),
            SCHEMA_COMMON.version,
        )
        if not force and 'version' in data and data.version == SCHEMA_COMMON.version:
            return data, False
        try:
            data.fixup_old_version = data.version
        except AttributeError:
            data.fixup_old_version = _OLDEST_VERSION
        data.version = SCHEMA_COMMON.version
        if 'simulationType' not in data:
            if 'sourceIntensityReport' in data.models:
                data.simulationType = 'srw'
            elif 'fieldAnimation' in data.models:
                data.simulationType = 'warppba'
            elif 'bunchSource' in data.models:
                data.simulationType = 'elegant'
            else:
                pkdlog('simulationType: not found; data={}', data)
                raise AssertionError('must have simulationType')
        elif data.simulationType == 'warp':
            data.simulationType = 'warppba'
        elif data.simulationType == 'fete':
            data.simulationType = 'warpvnd'
        if 'simulationSerial' not in data.models.simulation:
            data.models.simulation.simulationSerial = 0
        import sirepo.sim_data

        sirepo.sim_data.get_class(data.simulationType).fixup_old_data(data)
        data.pkdel('fixup_old_version')
        return data, True
    except Exception as e:
        pkdlog('exception={} data={} stack={}', e, data, pkdexc())
        raise

async def run_extract_job(self, run_dir, jhash, subcmd, arg):
    pkdc('{} {}: {} {}', run_dir, jhash, subcmd, arg)
    status = self.report_job_status(run_dir, jhash)
    if status is runner_client.JobStatus.MISSING:
        pkdlog('{} {}: report is missing; skipping extract job', run_dir, jhash)
        return {}
    # figure out which backend and any backend-specific info
    runner_info_file = run_dir.join(_RUNNER_INFO_BASENAME)
    if runner_info_file.exists():
        runner_info = pkjson.load_any(runner_info_file)
    else:
        # Legacy run_dir
        runner_info = pkcollections.Dict(
            version=1,
            backend='local',
            backend_info={},
        )
    assert runner_info.version == 1
    # run the job
    cmd = ['sirepo', 'extract', subcmd, arg]
    result = await _BACKENDS[runner_info.backend].run_extract_job(
        run_dir,
        cmd,
        runner_info.backend_info,
    )
    if result.stderr:
        pkdlog(
            'got output on stderr ({} {}):\n{}',
            run_dir,
            jhash,
            result.stderr.decode('utf-8', errors='ignore'),
        )
    if result.returncode != 0:
        pkdlog(
            'failed with return code {} ({} {}), stdout:\n{}',
            result.returncode,
            run_dir,
            subcmd,
            result.stdout.decode('utf-8', errors='ignore'),
        )
        raise AssertionError
    return pkjson.load_any(result.stdout)

def parse_rpn_value(value, variable_list):
    variables = {x['name']: x['value'] for x in variable_list}
    my_env = os.environ.copy()
    my_env["RPN_DEFNS"] = _RPN_DEFN_FILE
    depends = build_variable_dependency(value, variables, [])
    var_list = ' '.join(map(lambda x: '{} sto {}'.format(variables[x], x), depends))
    #TODO(pjm): security - need to scrub field value
    out = ''
    try:
        with open(os.devnull, 'w') as devnull:
            pkdc('rpnl "{}" "{}"', var_list, value)
            out = subprocess.check_output(['rpnl', '{} {}'.format(var_list, value)], env=my_env, stderr=devnull)
    except subprocess.CalledProcessError:
        return None, 'invalid'
    if len(out):
        return float(out.strip()), None
    return None, 'empty'

def _generate_field_integrals(g_id, f_paths):
    l_paths = [fp for fp in f_paths if fp.type == 'line']
    if len(l_paths) == 0:
        # return something or server.py will throw an exception
        return PKDict(warning='No paths')
    try:
        res = PKDict()
        for p in l_paths:
            res[p.name] = PKDict()
            p1 = _split_comma_field(p.begin, 'float')
            p2 = _split_comma_field(p.end, 'float')
            for i_type in radia_tk.INTEGRABLE_FIELD_TYPES:
                res[p.name][i_type] = radia_tk.field_integral(g_id, i_type, p1, p2)
        return res
    except RuntimeError as e:
        pkdc('Radia error {}', str(e))
        return PKDict(error=str(e))

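# Hedged sketch of _split_comma_field as called above: split a comma-separated
# string and cast each piece. Only the 'float' type tag appears in this
# collection; the fallback-to-str behavior is an assumption.
def _split_comma_field(field, f_type):
    cast = float if f_type == 'float' else str
    return [cast(x.strip()) for x in str(field).split(',')]
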
async def _op(self, msg):
    m = None
    try:
        m = pkjson.load_any(msg)
        pkdlog('op={} opId={} shifterImage={}', m.opName, m.get('opId'), m.get('shifterImage'))
        pkdc('m={}', job.LogFormatter(m))
        return await getattr(self, '_op_' + m.opName)(m)
    except Exception as e:
        err = 'exception=' + str(e)
        stack = pkdexc()
        pkdlog(
            'op={} exception={} stack={}',
            m and m.get('opName'),
            e,
            stack,
        )
        return self.format_op(m, job.OP_ERROR, error=err, stack=stack)

async def _handle_conn(job_tracker, stream):
    with _catch_and_log_errors(Exception, 'error handling request'):
        request_bytes = bytearray()
        while True:
            chunk = await stream.receive_some(_CHUNK_SIZE)
            if not chunk:
                break
            request_bytes += chunk
        request = pkjson.load_any(request_bytes)
        if 'run_dir' in request:
            request.run_dir = pkio.py_path(request.run_dir)
        pkdc('runner request: {!r}', request)
        handler = _RPC_HANDLERS[request.action]
        async with job_tracker.locks[request.run_dir]:
            response = await handler(job_tracker, request)
        pkdc('runner response: {!r}', response)
        response_bytes = pkjson.dump_bytes(response)
        await stream.send_all(response_bytes)

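# Hedged client-side sketch of the protocol _handle_conn implements above:
# send one JSON request, half-close the stream so the server's receive loop
# sees EOF, then read the JSON reply. The socket path, function name, and
# chunk size are assumptions, not the actual runner client.
import trio
from pykern import pkjson

async def send_runner_request(socket_path, action, **kwargs):
    stream = await trio.open_unix_socket(socket_path)
    async with stream:
        await stream.send_all(pkjson.dump_bytes(dict(action=action, **kwargs)))
        await stream.send_eof()
        reply = bytearray()
        while True:
            chunk = await stream.receive_some(4096)
            if not chunk:
                break
            reply += chunk
    return pkjson.load_any(reply)
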
def call_api(func_or_name, kwargs=None, data=None):
    """Call another API with permission checks.

    Note: also calls `save_to_cookie`.

    Args:
        func_or_name (object): api function or name (without `api_` prefix)
        kwargs (dict): to be passed to API [None]
        data (dict): will be returned `http_request.parse_json`
    Returns:
        flask.Response: result
    """
    p = None
    s = None
    try:
        # must be first so exceptions have access to sim_type
        if kwargs:
            # Any (GET) uri will have simulation_type in uri if it is application
            # specific.
            s = sirepo.http_request.set_sim_type(kwargs.get('simulation_type'))
        f = func_or_name if callable(func_or_name) \
            else _api_to_route[func_or_name].func
        sirepo.api_auth.check_api_call(f)
        try:
            if data:
                p = sirepo.http_request.set_post(data)
            r = flask.make_response(f(**kwargs) if kwargs else f())
        finally:
            if data:
                sirepo.http_request.set_post(p)
    except Exception as e:
        if isinstance(e, (sirepo.util.Reply, werkzeug.exceptions.HTTPException)):
            pkdc('api={} exception={} stack={}', func_or_name, e, pkdexc())
        else:
            pkdlog('api={} exception={} stack={}', func_or_name, e, pkdexc())
        r = sirepo.http_reply.gen_exception(e)
    finally:
        # http_request tries to keep a valid sim_type so
        # this is ok to call (even if s is None)
        sirepo.http_request.set_sim_type(s)
    sirepo.cookie.save_to_cookie(r)
    sirepo.events.emit('end_api_call', PKDict(resp=r))
    return r

def __init__(self):
    self._date_d = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
    with pkio.save_chdir(self._date_d, mkdir=True):
        self._login()
        sleep = 0
        for r in self._github.repositories(type='all'):
            if cfg.test_mode:
                if r.name != 'pykern':
                    continue
            if cfg.exclude_re and cfg.exclude_re.search(r.full_name):
                pkdc('exclude: {}', r.full_name)
                continue
            if sleep:
                time.sleep(sleep)
            else:
                sleep = cfg.api_pause_seconds
            pkdlog('{}: begin', r.full_name)
            self._repo(r)
    self._purge()

def _from_cookie_header(self, header):
    s = None
    err = None
    try:
        match = re.search(r'\b{}=([^;]+)'.format(cfg.http_name), header)
        if match:
            s = self._decrypt(match.group(1))
            self.update(auth_hook_from_header(self._deserialize(s)))
            self.incoming_serialized = s
            return
    except Exception as e:
        if 'crypto' in type(e).__module__:
            # cryptography module exceptions serialize to empty string
            # so just report the type.
            e = type(e)
        err = e
        pkdc(pkdexc())
    if err:
        pkdlog('Cookie decoding failed: {} value={}', err, s)

async def _incoming(content, handler):
    try:
        c = content
        if not isinstance(content, dict):
            c = pkjson.load_any(content)
        if c.get('api') != 'api_runStatus':
            pkdc(
                'class={} content={}',
                handler.sr_class,
                c,
            )
        await handler.sr_class(handler=handler, content=c).receive()
    except Exception as e:
        pkdlog('exception={} handler={} content={}', e, handler, content)
        pkdlog(pkdexc())
        try:
            handler.sr_on_exception()
        except Exception as e:
            pkdlog('sr_on_exception: exception={}', e)

def __init__(self):
    self._date_d = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
    with pkio.save_chdir(self._date_d, mkdir=True):
        self._login()
        sleep = 0
        for r in self._github.subscriptions():
            if cfg.test_mode:
                if r.name != 'pykern':
                    continue
            if cfg.exclude_re and cfg.exclude_re.search(r.full_name):
                pkdc('exclude: {}', r.full_name)
                continue
            if sleep:
                time.sleep(sleep)
            else:
                sleep = cfg.api_pause_seconds
            pkdlog('{}: begin', r.full_name)
            self._repo(r)
    self._purge()

def is_processing(cls, jid):
    with cls._lock:
        try:
            self = cls._job[jid]
        except KeyError:
            pkdc('{}: not found', jid)
            return False
        if self.in_kill:
            # Strange but true. The process is alive at this point so we
            # don't want to do anything like start a new process
            pkdc('{}: in_kill', jid)
            return True
        try:
            os.kill(self.pid, 0)
        except OSError:
            # Has to exist so no need to protect
            del self._job[jid]
            pkdlog('{}: pid={} does not exist, removing job', jid, self.pid)
            return False
        return True

def read_result(run_dir):
    """Read result data file from simulation

    Args:
        run_dir (py.path): where to find output

    Returns:
        dict: result or describes error
    """
    fn = json_filename(template_common.OUTPUT_BASE_NAME, run_dir)
    res = None
    err = None
    try:
        res = read_json(fn)
    except Exception as e:
        pkdc('{}: exception={}', fn, e)
        err = pkdexc()
        if pkio.exception_is_not_found(e):
            #TODO(robnagler) change POSIT matches _SUBPROCESS_ERROR_RE
            err = 'ERROR: Terminated unexpectedly'
            # Not found so return run.log as err
            rl = run_dir.join(template_common.RUN_LOG)
            try:
                e = pkio.read_text(rl)
                if _RUN_LOG_CANCEL_RE.search(e):
                    err = None
                elif e:
                    err = e
            except Exception as e:
                if not pkio.exception_is_not_found(e):
                    pkdlog('{}: error reading log: {}', rl, pkdexc())
        else:
            pkdlog('{}: error reading output: {}', fn, err)
    if err:
        return None, err
    if not res:
        res = {}
    if 'state' not in res:
        # Old simulation or other error, just say is canceled so restarts
        res = {'state': 'canceled'}
    return res, None

def _from_cookie_header(self, header):
    global _try_beaker_compat

    s = None
    err = None
    try:
        match = re.search(r'\b{}=([^;]+)'.format(cfg.http_name), header)
        if match:
            s = self._decrypt(match.group(1))
            self.update(self._deserialize(s))
            self.incoming_serialized = s
            set_log_user(self.get(_COOKIE_USER))
            return
    except Exception as e:
        if 'crypto' in type(e).__module__:
            # cryptography module exceptions serialize to empty string
            # so just report the type.
            e = type(e)
        err = e
        pkdc(pkdexc())
    # wait for decoding errors until after beaker attempt
    if not self.get(_COOKIE_SENTINEL) and _try_beaker_compat:
        try:
            import sirepo.beaker_compat

            res = sirepo.beaker_compat.update_session_from_cookie_header(header)
            if not res is None:
                self.clear()
                self.set_sentinel()
                self.update(res)
                err = None
                set_log_user(self.get(_COOKIE_USER))
        except AssertionError:
            pkdlog('Unconfiguring beaker_compat: {}', pkdexc())
            _try_beaker_compat = False
    if err:
        pkdlog('Cookie decoding failed: {} value={}', err, s)

def parse_rpn_value(value, variable_list):
    variables = {x['name']: x['value'] for x in variable_list}
    depends = build_variable_dependency(value, variables, [])
    #TODO(robnagler) scan variable values for strings. Need to be parsable
    var_list = ' '.join(map(lambda x: '{} sto {}'.format(variables[x], x), depends))
    #TODO(pjm): security - need to scrub field value
    # execn  send top of string stack to UNIX and put result on numerical stack
    # execs  send top of string stack to UNIX and put output on string stack
    # csh    start and enter C shell subprocess
    # cshs   send top of string stack to C shell
    # gets   get string from input file
    #        seems like this would be bad, because you could construct a string that could be executed
    # mudf   make user defined function from string stack (name commands mudf)
    # open   open input/output file
    # puts   put string to file
    # sleep  sleep for number of seconds
    # @      push command input file
    pkdc('rpn variables={} expr="{}"', var_list, value)
    out = elegant_common.subprocess_output(['rpnl', '{} {}'.format(var_list, value)])
    if out is None:
        return None, 'invalid'
    if len(out):
        return float(out.strip()), None
    return None, 'empty'

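# Hedged sketch of build_variable_dependency as called in both parse_rpn_value
# variants above: depth-first collect the variables an RPN expression
# references so each dependency is stored ("sto") before it is used. Splitting
# tokens on single spaces is an assumption about the expression format.
def build_variable_dependency(value, variables, depends):
    for token in str(value).split(' '):
        if token in variables and token not in depends:
            # resolve the dependency's own dependencies first
            build_variable_dependency(variables[token], variables, depends)
            depends.append(token)
    return depends
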
def _find_job(cls, jid):
    try:
        self = cls._job[jid]
    except KeyError:
        pkdlog('{}: job not found; len_jobs={}', jid, len(cls._job))
        return None
    res = self.async_result
    pkdc(
        '{}: job tid={} celery_state={} len_jobs={}',
        jid,
        res,
        res and res.state,
        len(cls._job),
    )
    if not res or res.ready():
        del self._job[jid]
        pkdlog(
            '{}: deleted errant or ready job; tid={} ready={}',
            jid,
            res,
            res and res.ready(),
        )
        return None
    return self

def FindingArrayMaxima(seq, deltI):
    pkdc('size: {}', np.shape(seq)[0])
    maxV = []
    maxI = []
    if len(seq):
        j = 0
        maxIj = 0
        for i in range(0, np.shape(seq)[0] - deltI - 1):
            pkdc('{} {}', i, seq[i])
            if seq[i + 1] > seq[i]:
                maxVj = seq[i + 1]
                maxIj = i + 1
                pkdc('{}', maxIj)
            if maxIj > 0 and i > maxIj + deltI:
                j = j + 1
                maxV.append(maxVj)
                maxI.append(maxIj)
                maxIj = 1000000
    else:
        print('Input array is empty')
    pkdc('Maximum # and Intensity of UR harmonics: {} {}', maxI, maxV)
    return (maxV, maxI)

def _simulation_run_status(data, quiet=False):
    """Look for simulation status and output

    Args:
        data (dict): request
        quiet (bool): don't write errors to log

    Returns:
        dict: status response
    """
    try:
        #TODO(robnagler): Lock
        rep = simulation_db.report_info(data)
        is_processing = cfg.job_queue.is_processing(rep.job_id)
        is_running = rep.job_status in _RUN_STATES
        res = {'state': rep.job_status}
        pkdc(
            '{}: is_processing={} is_running={} state={} cached_data={}',
            rep.job_id,
            is_processing,
            is_running,
            rep.job_status,
            bool(rep.cached_data),
        )
        if is_processing and not is_running:
            cfg.job_queue.race_condition_reap(rep.job_id)
            pkdc('{}: is_processing and not is_running', rep.job_id)
            is_processing = False
        if is_processing:
            if not rep.cached_data:
                return _simulation_error(
                    'input file not found, but job is running',
                    rep.input_file,
                )
        else:
            is_running = False
            if rep.run_dir.exists():
                res, err = simulation_db.read_result(rep.run_dir)
                if err:
                    return _simulation_error(err, 'error in read_result', rep.run_dir)
        if simulation_db.is_parallel(data):
            template = sirepo.template.import_module(data)
            new = template.background_percent_complete(
                rep.model_name,
                rep.run_dir,
                is_running,
                simulation_db.get_schema(data['simulationType']),
            )
            new.setdefault('percentComplete', 0.0)
            new.setdefault('frameCount', 0)
            res.update(new)
        res['parametersChanged'] = rep.parameters_changed
        if res['parametersChanged']:
            pkdlog(
                '{}: parametersChanged=True req_hash={} cached_hash={}',
                rep.job_id,
                rep.req_hash,
                rep.cached_hash,
            )
        #TODO(robnagler) verify serial number to see what's newer
        res.setdefault('startTime', _mtime_or_now(rep.input_file))
        res.setdefault('lastUpdateTime', _mtime_or_now(rep.run_dir))
        res.setdefault('elapsedTime', res['lastUpdateTime'] - res['startTime'])
        if is_processing:
            res['nextRequestSeconds'] = simulation_db.poll_seconds(rep.cached_data)
            res['nextRequest'] = {
                'report': rep.model_name,
                'reportParametersHash': rep.cached_hash,
                'simulationId': rep.cached_data['simulationId'],
                'simulationType': rep.cached_data['simulationType'],
            }
        pkdc(
            '{}: processing={} state={} cache_hit={} cached_hash={} data_hash={}',
            rep.job_id,
            is_processing,
            res['state'],
            rep.cache_hit,
            rep.cached_hash,
            rep.req_hash,
        )
    except Exception:
        return _simulation_error(pkdexc(), quiet=quiet)
    return res

def test_1():
    d = pkunit.data_dir()
    # Testing actual SRW calculations

    # Reading SRW data SPECTRUM
    IFileName = 'Spectrum.txt'
    f = open(str(d.join(IFileName)), 'r')
    e_p = []
    I_rad = []
    for line in f.readlines():
        words = line.split()
        e_p.append(words[0])
        I_rad.append(words[1])
    I_radf = list(map(float, I_rad))
    maxI = max(I_radf)
    pkdc('{}', I_radf)
    print('Spectral Amplitude, ph/s/mrad2', maxI)
    pkdc('{}', I_radf.index(max(I_radf)))
    maxIn = maxelements(I_radf)
    (maxV, maxI) = FindingArrayMaxima(I_radf, 5)
    print(maxI, maxV)
    f.close()

    # Reading SRW data TRAJECTORY
    IFileName = 'Trajectory.txt'
    f = open(str(d.join(IFileName)), 'r')
    z_dist = []
    x_traj = []
    for line in f.readlines():
        words = line.split()
        z_dist.append(words[0])
        x_traj.append(words[1])
    x_trajectory = list(map(float, x_traj))
    z_distance = list(map(float, z_dist))
    minX = min(x_trajectory)
    maxX = max(x_trajectory)
    minZ = min(z_distance)
    maxZ = max(z_distance)
    print('Length of ID, m', maxZ - minZ)
    print('Oscillation Amplitude, mm', (maxX - minX) / 2)
    L_trajectory = Path_Length(z_distance, x_trajectory)
    print('Length of Trajectory, m', L_trajectory)
    f.close()

    # Plotting
    plot(e_p, I_rad)
    j = 0
    for i in maxI:
        plt.scatter(e_p[i], maxV[j], color='red')
        j = j + 1
    # title(TitleP)
    # xlabel(Xlab)
    # ylabel(Ylab)
    grid()
    plt.show()

    plot(z_dist, x_trajectory, '.b', linestyle='-')
    (maxVt, maxIt) = FindingArrayMaxima(x_trajectory, 20)
    pkdc('{} {}', maxIt, maxVt)
    j = 0
    for i in maxIt:
        plt.scatter(z_dist[i], maxVt[j], color='red')
        j = j + 1
    grid()
    plt.show()

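# Hedged sketches of two helpers test_1 calls but which are not shown in this
# collection: Path_Length (polyline arc length from sampled z/x coordinates)
# and maxelements (indices of all occurrences of the maximum value). Both
# bodies are assumptions consistent with how the test uses them.
import numpy as np

def Path_Length(z, x):
    # sum of straight-segment lengths along the sampled trajectory
    z = np.asarray(z, dtype=float)
    x = np.asarray(x, dtype=float)
    return float(np.sum(np.sqrt(np.diff(z) ** 2 + np.diff(x) ** 2)))

def maxelements(seq):
    # indices of every element equal to the maximum
    m = max(seq)
    return [i for i, v in enumerate(seq) if v == m]
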
def test_simulation():
    wavefront = simulate()
    checksum = np.sum(np.abs(wavefront.arEx)) + np.abs(np.sum(wavefront.arEy))
    pkdc('checksum = {:f}', checksum)
    _assert(11845644288, checksum)
    return wavefront

def simulate():
    t0_setting = time.time()
    B = 0.4
    LeffBM = 4.
    BM = srw.SRWLMagFldM(B, 1, 'n', LeffBM)
    magnetic_field_container = srw.SRWLMagFldC(
        [BM],
        pkarray.new_double([0]),
        pkarray.new_double([0]),
        pkarray.new_double([0]),
    )
    eBeam = srw.SRWLPartBeam()
    eBeam.Iavg = 0.5
    eBeam.partStatMom1.x = 0.
    eBeam.partStatMom1.y = 0.
    eBeam.partStatMom1.z = 0.
    eBeam.partStatMom1.xp = 0.
    eBeam.partStatMom1.yp = 0.
    eBeam.partStatMom1.gamma = 3./0.51099890221e-03
    eBeam.arStatMom2[0] = 127.346e-06 ** 2
    eBeam.arStatMom2[1] = -10.85e-09
    eBeam.arStatMom2[2] = 92.3093e-06 ** 2
    eBeam.arStatMom2[3] = 13.4164e-06 ** 2
    eBeam.arStatMom2[4] = 0.0072e-09
    eBeam.arStatMom2[5] = 0.8022e-06 ** 2
    eBeam.arStatMom2[10] = 0.89e-03 ** 2
    wavefront = srw.SRWLWfr()
    wavefront.allocate(1, 10, 10)
    distSrcLens = 5.
    wavefront.mesh.zStart = distSrcLens
    wavefront.mesh.eStart = 0.5 * 0.123984
    wavefront.mesh.eFin = wavefront.mesh.eStart
    horAng = 0.1
    wavefront.mesh.xStart = -0.5 * horAng * distSrcLens
    wavefront.mesh.xFin = 0.5 * horAng * distSrcLens
    verAng = 0.02
    wavefront.mesh.yStart = -0.5 * verAng * distSrcLens
    wavefront.mesh.yFin = 0.5 * verAng * distSrcLens
    wavefront.partBeam = eBeam
    distLensImg = distSrcLens
    focLen = wavefront.mesh.zStart * distLensImg / (distSrcLens + distLensImg)
    optBL = _container(
        srw.SRWLOptL(_Fx=focLen, _Fy=focLen),
        dict(
            horizontal_resolution_factor_when_resizing=2.,
            vertical_resolution_factor_when_resizing=2.,
        ),
        srw.SRWLOptD(distLensImg),
        {},
    )
    pkdc('parameters done in {}s', round(time.time() - t0_setting))
    meth = 2
    relPrec = 0.01
    zStartInteg = 0
    zEndInteg = 0
    npTraj = 20000
    useTermin = 1
    sampFactNxNyForProp = 0.7
    arPrecSR = [meth, relPrec, zStartInteg, zEndInteg, npTraj, useTermin, sampFactNxNyForProp]
    pkdc('Performing initial electric field calculation...')
    srw.srwl.CalcElecFieldSR(wavefront, 0, magnetic_field_container, arPrecSR)
    pkdc('Extracting intensity from calculated electric field and saving it to file(s)')
    mesh0 = copy.deepcopy(wavefront.mesh)
    arI0 = pkarray.new_float([0] * mesh0.nx * mesh0.ny)
    srw.srwl.CalcIntFromElecField(arI0, wavefront, 6, 0, 3, mesh0.eStart, 0, 0)
    pkdc('Simulating single-electron electric field wavefront propagation...')
    srw.srwl.PropagElecField(wavefront, optBL)
    return wavefront

def test_simulation():
    wfr = simulate()
    checksum = np.sum(np.abs(wfr.arEx)) + np.abs(np.sum(wfr.arEy))
    pkdc("checksum = {:f}", checksum)
    _assert(11845644288, checksum)
    return wfr