def copy_related_files(data, source_path, target_path):
    """Copy any simulation output from ``source_path`` into ``target_path``.

    Copies every file found in the source's ``animation`` subdirectory,
    creating the target ``animation`` directory first. No-op when the
    source has no ``animation`` directory.
    """
    src_animation = py.path.local(source_path).join('animation')
    if not os.path.isdir(str(src_animation)):
        return
    dst_animation = py.path.local(target_path).join('animation')
    pkio.mkdir_parent(str(dst_animation))
    for name in glob.glob(str(src_animation.join('*'))):
        py.path.local(name).copy(dst_animation)
def _random_id(parent_dir, simulation_type=None):
    """Create a random id in parent_dir

    Retries up to 5 times on a collision with an existing directory or,
    when ``simulation_type`` is given, with an existing global simulation.

    Args:
        parent_dir (py.path): where id should be unique
        simulation_type (str): if given, id must not match a global simulation

    Returns:
        dict: id (str) and path (py.path)

    Raises:
        RuntimeError: when no unique directory could be created in 5 tries
    """
    pkio.mkdir_parent(parent_dir)
    r = random.SystemRandom()
    # Generate cryptographically secure random string
    for _ in range(5):
        i = ''.join(r.choice(_ID_CHARS) for x in range(_ID_LEN))
        if simulation_type:
            if find_global_simulation(simulation_type, i):
                continue
        d = parent_dir.join(i)
        try:
            os.mkdir(str(d))
            return pkcollections.Dict(id=i, path=d)
        except OSError as e:
            # BUG FIX: previously re-raised even on EEXIST, which defeated
            # the retry loop. A directory collision should just try a new id.
            if e.errno == errno.EEXIST:
                continue
            raise
    raise RuntimeError('{}: failed to create unique directory'.format(parent_dir))
def _create_example_and_lib_files(simulation_type):
    """Create the simulation dir with examples and the lib dir with static files."""
    sim_dir = simulation_dir(simulation_type)
    pkio.mkdir_parent(sim_dir)
    for example in examples(simulation_type):
        save_new_example(simulation_type, example)
    lib_dir = simulation_lib_dir(simulation_type)
    pkio.mkdir_parent(lib_dir)
    template = sirepo.template.import_module(simulation_type)
    for static_file in template.static_lib_files():
        static_file.copy(lib_dir)
def copy_related_files(data, source_path, target_path):
    """Copy rs4pi data files and the DICOM directory to the target simulation."""
    src = py.path.local(source_path)
    dst = py.path.local(target_path)
    # pixels3d.dat, rs4pi-roi-data.json, dicom/*.json
    for filename in (_PIXEL_FILE, _ROI_FILE_NAME, _DOSE_FILE, RTDOSE_EXPORT_FILENAME):
        source_file = src.join(filename)
        if source_file.exists():
            source_file.copy(dst.join(filename))
    dicom_dir = dst.join(_DICOM_DIR)
    pkio.mkdir_parent(str(dicom_dir))
    for name in glob.glob(str(src.join(_DICOM_DIR, '*'))):
        py.path.local(name).copy(dicom_dir)
def _create_example_and_lib_files(simulation_type):
    """Create example simulations and copy template resource files into the lib dir."""
    sim_dir = simulation_dir(simulation_type)
    pkio.mkdir_parent(sim_dir)
    for example in examples(simulation_type):
        save_new_example(example)
    lib_dir = simulation_lib_dir(simulation_type)
    pkio.mkdir_parent(lib_dir)
    template = sirepo.template.import_module(simulation_type)
    if hasattr(template, 'resource_files'):
        for resource in template.resource_files():
            #TODO(pjm): symlink has problems in containers
            # lib_dir.join(resource.basename).mksymlinkto(resource)
            resource.copy(lib_dir)
def copy_related_files(data, source_path, target_path):
    """Copy simulation output and element input files for a simulation."""
    # copy any simulation output
    src_animation = py.path.local(source_path).join("animation")
    if os.path.isdir(str(src_animation)):
        dst_animation = py.path.local(target_path).join("animation")
        pkio.mkdir_parent(str(dst_animation))
        for name in glob.glob(str(src_animation.join("*"))):
            shutil.copy(name, str(dst_animation))
    # copy element InputFiles to lib
    _copy_lib_files(
        data,
        py.path.local(os.path.dirname(source_path)).join("lib"),
        py.path.local(os.path.dirname(target_path)).join("lib"),
    )
def _user_dir_create():
    """Create a user and initialize the directory"""
    uid = _random_id(_user_dir_name())['id']
    # Must set before calling simulation_dir
    flask.session[_UID_ATTR] = uid
    for app_name in APP_NAMES:
        pkio.mkdir_parent(simulation_dir(app_name))
        for example in examples(app_name):
            save_new_example(app_name, example)
        lib_dir = simulation_lib_dir(app_name)
        pkio.mkdir_parent(lib_dir)
        for lib_file in sirepo.template.import_module(app_name).static_lib_files():
            lib_file.copy(lib_dir)
def tmp_dir():
    """Generates tmp directory for the user

    Returns:
        py.path: directory to use for temporary work
    """
    path = _random_id(_user_dir().join(_TMP_DIR))['path']
    return pkio.mkdir_parent(path)
def flask_client(cfg=None):
    """Return FlaskClient with easy access methods.

    Creates a new run directory every test file so can assume
    sharing of state on the server within a file (module).

    Two methods of interest: `sr_post` and `sr_get`.

    Args:
        cfg (dict): extra configuration for reset_state_for_testing

    Returns:
        FlaskClient: for local requests to Flask server
    """
    global server
    a = 'srunit_flask_client'
    if not cfg:
        cfg = {}
    wd = pkunit.work_dir()
    # server db lives under the test's work dir so each test file is isolated
    cfg['SIREPO_SERVER_DB_DIR'] = str(pkio.mkdir_parent(wd.join('db')))
    if not (server and hasattr(server.app, a)):
        with pkio.save_chdir(wd):
            pkconfig.reset_state_for_testing(cfg)
            # import deferred until config is reset so the server picks it up
            from sirepo import server as s
            server = s
            server.app.config['TESTING'] = True
            server.app.test_client_class = _TestClient
            server.init()
            # cache the client on the app so subsequent calls reuse it
            setattr(server.app, a, server.app.test_client())
    return getattr(server.app, a)
def test_unchecked_remove():
    """Also tests mkdir_parent"""
    from pykern import pkunit
    from pykern import pkio
    with pkunit.save_chdir_work():
        fn = 'f1'
        # Should not throw an exception
        pkio.unchecked_remove(fn)
        pkio.write_text(fn, 'hello')
        pkio.unchecked_remove(fn)
        assert not os.path.exists(fn), \
            'When file removed, should be gone'
        for d in ('d1', 'd2/d3'):
            assert py.path.local(d) == pkio.mkdir_parent(d), \
                'When mkdir_parent is called, returns path passed in'
        assert os.path.exists('d1'), \
            'When single directory, should exist'
        assert os.path.exists('d2/d3'), \
            'When nested directory, should exist'
        # removing the cwd, its absolute path, or root must all be rejected
        for protected in ('.', os.getcwd(), '/'):
            with pytest.raises(AssertionError):
                pkio.unchecked_remove(protected)
def test_init_tree():
    """Normal case"""
    with pkunit.save_chdir_work():
        name = 'proj1'
        pkio.mkdir_parent(name)
        with pkio.save_chdir(name):
            # init_tree requires an initialized git repo with identity configured
            subprocess.check_call(['git', 'init', '.'])
            subprocess.check_call(['git', 'config', 'user.email', '*****@*****.**'])
            subprocess.check_call(['git', 'config', 'user.name', 'pykern'])
            projex.init_tree(
                name=name,
                author='zauthor',
                author_email='*****@*****.**',
                description='some python project',
                license='MIT',
                url='http://example.com',
            )
            pkio.write_text('tests/test_1.py', 'def test_1(): pass')
            # every generated file must exist and match its expected regex
            for expect_fn, expect_re in (
                ('.gitignore', 'MANIFEST.in'),
                ('LICENSE', 'The MIT License'),
                ('README.md', 'licenses/MIT'),
                ('docs/_static/.gitignore', ''),
                ('docs/_templates/.gitignore', ''),
                ('docs/index.rst', name),
                ('requirements.txt', 'pykern'),
                ('setup.py', "author='zauthor'"),
                ('setup.py', r':copyright:.*zauthor\.'),
                ('tests/.gitignore', '_work'),
                (name + '/__init__.py', ''),
                (name + '/package_data/.gitignore', ''),
                (
                    '{}/{}_console.py'.format(name, name),
                    r"main\('{}'\)".format(name),
                ),
            ):
                assert re.search(expect_re, pkio.read_text(expect_fn)), \
                    '{} should exist and match "{}"'.format(expect_fn, expect_re)
            subprocess.check_call(['git', 'commit', '-m', 'initial'])
            # Do not install from PyPI
            pkio.write_text(
                'requirements.txt',
                '-e ' + str(py.path.local(__file__).dirpath().dirpath().dirpath()),
            );
            subprocess.check_call(['python', 'setup.py', 'test'])
            subprocess.check_call(['python', 'setup.py', 'tox'])
def move_user_simulations(to_uid):
    """Moves all non-example simulations for the current session into the
    target user's dir.
    """
    from_uid = cookie.get_user()
    with _global_lock:
        pattern = str(user_dir_name(from_uid).join('*', '*', SIMULATION_DATA_FILE))
        for path in glob.glob(pattern):
            sim = read_json(path)['models']['simulation']
            # examples are recreated per-user; never move them
            if sim.get('isExample'):
                continue
            src_dir = os.path.dirname(path)
            dst_dir = src_dir.replace(from_uid, to_uid)
            pkdlog('{} -> {}', src_dir, dst_dir)
            pkio.mkdir_parent(dst_dir)
            os.rename(src_dir, dst_dir)
def __init__(self, *args, send_reply=True, **kwargs):
    """Set up the job command: run directory, lib file list, and process.

    For a 'compute' job the run directory is recreated from scratch.
    """
    super().__init__(*args, send_reply=send_reply, **kwargs)
    self.run_dir = pkio.py_path(self.msg.runDir)
    self._is_compute = self.msg.jobCmd == 'compute'
    if self._is_compute:
        # compute jobs always start from a clean run directory
        pkio.unchecked_remove(self.run_dir)
        pkio.mkdir_parent(self.run_dir)
    self._lib_file_uri = self.msg.get('libFileUri', '')
    self._lib_file_list_f = ''
    if self._lib_file_uri:
        # persist the lib file list so the spawned job process can read it
        f = self.run_dir.join('sirepo-lib-file-list.txt')
        pkio.write_text(f, '\n'.join(self.msg.libFileList))
        self._lib_file_list_f = str(f)
    self._in_file = self._create_in_file()
    self._process = _Process(self)
    self._terminating = False
    # wall-clock start, seconds since epoch
    self._start_time = int(time.time())
    self.jid = self.msg.computeJid
def move_user_simulations(to_uid):
    """Moves all non-example simulations for the current session into the
    target user's dir.
    """
    from_uid = cookie.get_user()
    glob_path = str(
        user_dir_name(from_uid).join('*', '*', SIMULATION_DATA_FILE))
    with _global_lock:
        for path in glob.glob(glob_path):
            sim = read_json(path)['models']['simulation']
            if 'isExample' in sim and sim['isExample']:
                continue
            old_dir = os.path.dirname(path)
            new_dir = old_dir.replace(from_uid, to_uid)
            pkdlog('{} -> {}', old_dir, new_dir)
            pkio.mkdir_parent(new_dir)
            os.rename(old_dir, new_dir)
def tmp_dir():
    """Generates new, temporary directory

    Returns:
        py.path: directory to use for temporary work
    """
    path = _random_id(_user_dir().join(_TMP_DIR))['path']
    # guarantee a clean, empty directory even if the id dir already existed
    pkio.unchecked_remove(path)
    return pkio.mkdir_parent(path)
async def _do_agent_start(self, op):
    """Start the local agent subprocess in its exec directory.

    Args:
        op: operation that triggered the start (not used directly here)
    """
    stdin = None
    try:
        cmd, stdin, env = self._agent_cmd_stdin_env(cwd=self._agent_exec_dir)
        pkdlog('{} agent_exec_dir={}', self, self._agent_exec_dir)
        # since this is local, we can make the directory; useful for debugging
        pkio.mkdir_parent(self._agent_exec_dir)
        self.subprocess = tornado.process.Subprocess(
            cmd,
            cwd=self._agent_exec_dir,
            env=env,
            stdin=stdin,
            # merge agent stderr into its stdout
            stderr=subprocess.STDOUT,
        )
        self.subprocess.set_exit_callback(self._agent_on_exit)
    finally:
        # close the parent's copy of the stdin fd; the child holds its own
        if stdin:
            stdin.close()
def test_init_rs_tree():
    """Normal case"""
    with pkunit.save_chdir_work():
        name = 'rs_proj1'
        pkio.mkdir_parent(name)
        with pkio.save_chdir(name):
            # init_rs_tree requires a git repo with identity configured
            subprocess.check_call(['git', 'init', '.'])
            subprocess.check_call(['git', 'config', 'user.email', '*****@*****.**'])
            subprocess.check_call(['git', 'config', 'user.name', 'pykern'])
            projex.init_rs_tree(
                description='some radiasoftee project',
            )
            checks = (
                ('LICENSE', 'Apache License'),
                ('setup.py', "author='RadiaSoft LLC'"),
            )
            for expect_fn, expect_re in checks:
                assert re.search(expect_re, pkio.read_text(expect_fn)), \
                    '{} should exist and match "{}"'.format(expect_fn, expect_re)
def test_init_tree():
    """Normal case"""
    with pkunit.save_chdir_work():
        name = 'proj1'
        pkio.mkdir_parent(name)
        with pkio.save_chdir(name):
            # init_tree requires an initialized git repo with identity configured
            subprocess.check_call(['git', 'init', '.'])
            subprocess.check_call(['git', 'config', 'user.email', '*****@*****.**'])
            subprocess.check_call(['git', 'config', 'user.name', 'pykern'])
            projex.init_tree(
                name=name,
                author='zauthor',
                author_email='*****@*****.**',
                description='some python project',
                license='MIT',
                url='http://example.com',
            )
            pkio.write_text('tests/test_1.py', 'def test_1(): pass')
            # every generated file must exist and match its expected regex
            for expect_fn, expect_re in (
                ('.gitignore', 'MANIFEST.in'),
                ('LICENSE', 'The MIT License'),
                ('README.md', 'licenses/MIT'),
                ('docs/_static/.gitignore', ''),
                ('docs/_templates/.gitignore', ''),
                ('docs/index.rst', name),
                ('setup.py', "author='zauthor'"),
                ('setup.py', r':copyright:.*zauthor\.'),
                ('tests/.gitignore', '_work'),
                (name + '/__init__.py', ''),
                (name + '/package_data/.gitignore', ''),
                (
                    '{}/{}_console.py'.format(name, name),
                    r"main\('{}'\)".format(name),
                ),
            ):
                assert re.search(expect_re, pkio.read_text(expect_fn)), \
                    '{} should exist and match "{}"'.format(expect_fn, expect_re)
            subprocess.check_call(['git', 'commit', '-m', 'initial'])
            # Do not install from PyPI
            pykern_path = py.path.local(__file__).dirpath().dirpath().dirpath()
            # pykern must be installed for setup.py to be able to be called
            subprocess.check_call(['pip', 'install', '-e', str(pykern_path)])
            subprocess.check_call(['python', 'setup.py', 'test'])
            subprocess.check_call(['python', 'setup.py', 'tox'])
def test_walk_tree_and_sorted_glob():
    """Looks in work_dir"""
    from pykern import pkunit
    from pykern import pkio
    with pkunit.save_chdir_work() as pwd:
        # create nested directories; only some of them receive files
        for f in ('d1/d7', 'd2/d3', 'd4/d5/d6'):
            pkio.mkdir_parent(f)
        expect = []
        for f in ['d1/d7/f1', 'd4/d5/f2', 'd2/d3/f3']:
            pkio.write_text(f, '')
            expect.append(py.path.local(f))
        assert sorted(expect) == list(pkio.walk_tree('.')), \
            'When walking tree, should only return files'
        assert [expect[2]] == list(pkio.walk_tree('.', 'f3')), \
            'When walking tree with file_re, should only return matching files'
        assert [expect[0]] == list(pkio.walk_tree('.', '^d1')), \
            'When walking tree with file_re, file to match does not include dir being searched'
        # glob matches top-level names ending in '4' or '2'
        assert pkio.sorted_glob('*[42]') == [py.path.local(f) for f in ('d2', 'd4')]
def _start_simulation(data, run_async=False):
    """Setup and start the simulation.

    Args:
        data (dict): app data
        run_async (bool): run-background or run

    Returns:
        object: _Command or daemon instance
    """
    run_dir = simulation_db.simulation_run_dir(data, remove_dir=True)
    pkio.mkdir_parent(run_dir)
    #TODO(robnagler) create a lock_dir -- what node/pid/thread to use?
    #   probably can only do with celery.
    simulation_type = data['simulationType']
    sid = simulation_db.parse_sid(data)
    data = simulation_db.fixup_old_data(simulation_type, data)
    assert simulation_type in simulation_db.APP_NAMES, \
        '{}: invalid simulation type'.format(simulation_type)
    template = sirepo.template.import_module(simulation_type)
    simulation_db.save_simulation_json(simulation_type, data)
    # copy both the simulation's own files and the shared lib files into run_dir
    for d in simulation_db.simulation_dir(simulation_type, sid), simulation_db.simulation_lib_dir(simulation_type):
        for f in glob.glob(str(d.join('*.*'))):
            if os.path.isfile(f):
                py.path.local(f).copy(run_dir)
    # write the fixed-up input data for the job process to read
    with open(str(run_dir.join('in{}'.format(simulation_db.JSON_SUFFIX))), 'w') as outfile:
        json.dump(data, outfile)
    pkio.write_text(
        run_dir.join(simulation_type + '_parameters.py'),
        template.generate_parameters_file(
            data,
            _schema_cache(simulation_type),
            run_dir=run_dir,
            run_async=run_async,
        )
    )
    cmd = [_ROOT_CMD, simulation_type] \
        + ['run-background' if run_async else 'run'] + [str(run_dir)]
    if run_async:
        # background jobs go through the configured queue (e.g. celery)
        return cfg.job_queue(sid, run_dir, cmd)
    return _Command(cmd, cfg.foreground_time_limit)
def prepare_simulation(data, run_dir=None):
    """Create and install files, update parameters, and generate command.

    Copies files into the simulation directory (``run_dir``)
    Updates the parameters in ``data`` and save.
    Generate the pkcli command to pass to task runner.

    Args:
        data (dict): report and model parameters
        run_dir (py.path.local): defaults to `simulation_run_dir`

    Returns:
        list, py.path: pkcli command, simulation directory
    """
    import sirepo.sim_data
    if run_dir is None:
        # This is the legacy (pre-runner-daemon) code path
        run_dir = simulation_run_dir(data, remove_dir=True)
        #TODO(robnagler) create a lock_dir -- what node/pid/thread to use?
        #   probably can only do with celery.
        # NOTE(review): assumed mkdir/write_status belong to the legacy branch
        # (the runner daemon pre-creates run_dir) -- confirm against history
        pkio.mkdir_parent(run_dir)
        write_status('pending', run_dir)
    sim_type = data.simulationType
    template = sirepo.template.import_module(data)
    s = sirepo.sim_data.get_class(sim_type)
    s.lib_files_to_run_dir(data, run_dir)
    update_rsmanifest(data)
    write_json(run_dir.join(template_common.INPUT_BASE_NAME), data)
    #TODO(robnagler) encapsulate in template
    is_p = s.is_parallel(data)
    template.write_parameters(
        data,
        run_dir=run_dir,
        is_parallel=is_p,
    )
    # pkcli invocation: <root_pkg> <template_module> run[-background] <run_dir>
    cmd = [
        pkinspect.root_package(template),
        pkinspect.module_basename(template),
        'run-background' if is_p else 'run',
        str(run_dir),
    ]
    return cmd, run_dir
def default_command():
    """Run the job supervisor: configure, wire routes, and start tornado.

    Blocks in the tornado IOLoop until terminated by SIGTERM/SIGINT.
    """
    global cfg
    cfg = pkconfig.init(
        debug=(pkconfig.channel_in('dev'), bool, 'run supervisor in debug mode'),
        ip=(sirepo.job.DEFAULT_IP, str, 'ip to listen on'),
        port=(sirepo.job.DEFAULT_PORT, int, 'what port to listen on'),
    )
    sirepo.srtime.init()
    sirepo.job_supervisor.init()
    # ensure file roots exist before any request arrives
    pkio.mkdir_parent(sirepo.job.DATA_FILE_ROOT)
    pkio.mkdir_parent(sirepo.job.LIB_FILE_ROOT)
    app = tornado.web.Application(
        [
            (sirepo.job.AGENT_URI, _AgentMsg),
            (sirepo.job.SERVER_URI, _ServerReq),
            (sirepo.job.SERVER_PING_URI, _ServerPing),
            (sirepo.job.SERVER_SRTIME_URI, _ServerSrtime),
            (sirepo.job.DATA_FILE_URI + '/(.*)', _DataFileReq),
            (sirepo.job.SIM_DB_FILE_URI + '/(.+)', sirepo.sim_db_file.FileReq),
        ],
        debug=cfg.debug,
        static_path=sirepo.job.SUPERVISOR_SRV_ROOT.join(
            sirepo.job.LIB_FILE_URI),
        # tornado expects a trailing slash
        static_url_prefix=sirepo.job.LIB_FILE_URI + '/',
        websocket_max_message_size=sirepo.job.cfg.max_message_bytes,
        websocket_ping_interval=sirepo.job.cfg.ping_interval_secs,
        websocket_ping_timeout=sirepo.job.cfg.ping_timeout_secs,
    )
    server = tornado.httpserver.HTTPServer(
        app,
        # honor X-Forwarded-* headers from the proxy
        xheaders=True,
        max_buffer_size=sirepo.job.cfg.max_message_bytes,
    )
    server.listen(cfg.port, cfg.ip)
    # graceful shutdown on either signal
    signal.signal(signal.SIGTERM, _sigterm)
    signal.signal(signal.SIGINT, _sigterm)
    pkdlog('ip={} port={}', cfg.ip, cfg.port)
    tornado.ioloop.IOLoop.current().start()
def tmp_dir(chdir=False):
    """Generates new, temporary directory

    Generator intended for use as a context manager: the directory is
    removed when the block exits (even on error).

    Args:
        chdir (bool): if true, will save_chdir

    Returns:
        py.path: directory to use for temporary work
    """
    d = None
    try:
        # cfg.tmp_dir overrides the per-user random tmp directory
        d = cfg.tmp_dir or _random_id(_user_dir().join(_TMP_DIR))['path']
        # start from a clean, empty directory
        pkio.unchecked_remove(d)
        pkio.mkdir_parent(d)
        if chdir:
            with pkio.save_chdir(d):
                yield d
        else:
            yield d
    finally:
        # best-effort cleanup even if the body raised
        if d:
            pkio.unchecked_remove(d)
def _write_rtdose_file(files, rtdose_path, prefix, filename=_VTI_RTDOSE_ZIP_FILE):
    """Extract dose info from an RTDOSE DICOM file; write pixel and vti data.

    Args:
        files: dicom file collection (passed through to _extract_dcm_info)
        rtdose_path (str): path to the RTDOSE DICOM file
        prefix (str): prefix passed to _write_vti_file
        filename (str): vti zip filename to write

    Returns:
        dose info (dict-like) with DoseMax and DoseGridScaling set
    """
    rtdose = pydicom.dcmread(rtdose_path)
    doseinfo = _extract_dcm_info(files, None, rtdose)
    doseinfo.DoseMax = int(rtdose.pixel_array.max())
    doseinfo.DoseGridScaling = rtdose.DoseGridScaling
    pkdlog('max dose: {}, scaler: {}', doseinfo.DoseMax, doseinfo.DoseGridScaling)
    pkdlog('max dose (scaled): {}', rtdose.pixel_array.max() * rtdose.DoseGridScaling)
    #doseinfo.ImagePositionPatient[2] += (doseinfo.Count - 1) * doseinfo.SliceThickness
    #pkdp('dose pixel array size: {}, len(rtdose.pixel_array))
    pkio.mkdir_parent(_PIXEL_DATA_DIR)
    pkdlog(rtdose.pixel_array.shape)
    # order frame in direction used by ct (assumes HFS)
    with open(_PIXEL_DATA_FILE, 'ab') as f:
        #for di in reversed(range(rtdose.pixel_array.shape[0])):
        for di in range(rtdose.pixel_array.shape[0]):
            for yi in range(rtdose.pixel_array.shape[1]):
                pixels = rtdose.pixel_array[di][yi]
                # pixels = pixels.astype(np.uint16)
                # append raw row bytes; file opened in append ('ab') mode
                pixels.tofile(f)
    _write_vti_file(filename, doseinfo, prefix)
    return doseinfo
def default_command():
    """Build the distribution tree"""
    from rsconf import db
    if pkconfig.channel_in('dev'):
        from rsconf.pkcli import setup_dev
        setup_dev.default_command()
    prev_umask = None
    try:
        # restrict group/other permissions while building the tree
        prev_umask = os.umask(0o27)
        dbt = db.T()
        # Outside of srv_d so nginx doesn't see it. However,
        # make sure the same levels of directory so relative
        # links to rpm still work.
        # POSIT: srv_host_d is one level below srv_d
        tmp_d = (dbt.srv_d + '-tmp').join(db.HOST_SUBDIR)
        old_d = tmp_d + '-old'
        new_d = tmp_d + '-new'
        pkio.unchecked_remove(new_d, old_d)
        pkio.mkdir_parent(new_d)
        #TODO(robnagler) make this global pkconfig. Doesn't make sense to
        #   be configured in rsconf_db, because not host-based.
        for c, hosts in dbt.channel_hosts().items():
            for h in hosts:
                t = T(dbt, c, h)
                t.create_host(new_d)
        # make the tree readable by the server group
        subprocess.check_call(['chgrp', '-R', db.cfg.srv_group, str(new_d)])
        subprocess.check_call(['chmod', '-R', 'g+rX', str(new_d)])
        # swap the newly built tree into place, keeping the previous as -old
        pkio.unchecked_remove(old_d)
        dst_d = dbt.srv_host_d
        if dst_d.check():
            dst_d.rename(old_d)
        new_d.rename(dst_d)
    finally:
        # restore the process umask even on failure
        if prev_umask:
            os.umask(prev_umask)
def move_user_simulations(from_uid, to_uid):
    """Moves all non-example simulations `from_uid` into `to_uid`.

    Only moves non-example simulations. Doesn't delete the from_uid.

    Args:
        from_uid (str): source user
        to_uid (str): dest user
    """
    with _global_lock:
        for path in glob.glob(
            str(user_dir_name(from_uid).join('*', '*', SIMULATION_DATA_FILE)),
        ):
            sim = read_json(path)['models']['simulation']
            # examples are recreated per-user; never move them
            if sim.get('isExample'):
                continue
            src = os.path.dirname(path)
            dst = src.replace(from_uid, to_uid)
            pkdlog('{} -> {}', src, dst)
            pkio.mkdir_parent(dst)
            os.rename(src, dst)
def sirepo_init_app(self, app, db_dir):
    """Initialize cfg with db_dir and register self with Flask

    Args:
        app (flask): Flask application object
        db_dir (py.path.local): db_dir passed on command line
    """
    app.sirepo_db_dir = db_dir
    data_dir = db_dir.join(_BEAKER_DATA_DIR)
    lock_dir = data_dir.join(_BEAKER_LOCK_DIR)
    pkio.mkdir_parent(lock_dir)
    session_cfg = {
        'session.auto': True,
        'session.cookie_expires': False,
        'session.type': 'file',
        'session.data_dir': str(data_dir),
        'session.lock_dir': str(lock_dir),
    }
    #TODO(robnagler) Generalize? seems like we'll be shadowing lots of config
    session_cfg.update(
        ('session.' + k, cfg.beaker_session[k]) for k in cfg.beaker_session
    )
    app.wsgi_app = beaker.middleware.SessionMiddleware(app.wsgi_app, session_cfg)
    app.session_interface = self
def tmp_dir(chdir=False, uid=None):
    """Generates new, temporary directory

    Generator intended for use as a context manager: the directory is
    removed when the block exits (even on error).

    Args:
        chdir (bool): if true, will save_chdir
        uid (str): user id; defaults to the logged-in user

    Returns:
        py.path: directory to use for temporary work
    """
    d = None
    try:
        # explicit uid is validated; otherwise use the logged-in user's path
        p = user_path(uid, check=True) if uid else logged_in_user_path()
        # cfg.tmp_dir overrides the per-user random tmp directory
        d = cfg.tmp_dir or _random_id(p.join(_TMP_DIR), uid=uid)['path']
        # start from a clean, empty directory
        pkio.unchecked_remove(d)
        pkio.mkdir_parent(d)
        if chdir:
            with pkio.save_chdir(d):
                yield d
        else:
            yield d
    finally:
        # best-effort cleanup even if the body raised
        if d:
            pkio.unchecked_remove(d)
def prepare_simulation(data):
    """Create and install files, update parameters, and generate command.

    Copies files into the simulation directory (``run_dir``).
    Updates the parameters in ``data`` and save.
    Generate the pkcli command to pass to task runner.

    Args:
        data (dict): report and model parameters

    Returns:
        list, py.path: pkcli command, simulation directory
    """
    run_dir = simulation_run_dir(data, remove_dir=True)
    #TODO(robnagler) create a lock_dir -- what node/pid/thread to use?
    #   probably can only do with celery.
    pkio.mkdir_parent(run_dir)
    write_status('pending', run_dir)
    sim_type = data['simulationType']
    # NOTE(review): sid (and sim_type) are not used below; parse_sid may
    # validate data as a side effect -- confirm before removing
    sid = parse_sid(data)
    template = sirepo.template.import_module(data)
    template_common.copy_lib_files(data, None, run_dir)
    write_json(run_dir.join(template_common.INPUT_BASE_NAME), data)
    #TODO(robnagler) encapsulate in template
    is_p = is_parallel(data)
    template.write_parameters(
        data,
        run_dir=run_dir,
        is_parallel=is_p,
    )
    # pkcli invocation: <root_pkg> <template_module> run[-background] <run_dir>
    cmd = [
        pkinspect.root_package(template),
        pkinspect.module_basename(template),
        'run-background' if is_p else 'run',
        str(run_dir),
    ]
    return cmd, run_dir
def _issues():
    """Dump all repo issues (plus PR review comments) as JSON files, then tar."""
    if not repo.has_issues:
        return
    base = bd + '.issues'
    out_dir = pkio.mkdir_parent(base)
    for issue in repo.issues(state='all'):
        record = _trim_body(issue)
        record['comments'] = [_trim_body(c) for c in issue.comments()]
        pull = issue.pull_request()
        if pull:
            # issues that are PRs also carry review comments
            record['review_comments'] = [
                _trim_body(c) for c in pull.review_comments()
            ]
        pkjson.dump_pretty(record, filename=out_dir.join(str(issue.number) + '.json'))
    _tar(base)
def init_apis(*args, **kwargs):
    """Initialize jupyterhub config, user db directory, model, and event hooks."""
    global cfg
    cfg = pkconfig.init(
        user_db_root_d=(
            pkio.py_path(sirepo.srdb.root()).join('jupyterhub', 'user'),
            pkio.py_path,
            'Jupyterhub user db',
        ),
        rs_jupyter_migrate=(
            False,
            bool,
            'give user option to migrate data from jupyter.radiasoft.org'),
        uri_root=('jupyter', str, 'the root uri of jupyterhub'),
    )
    pkio.mkdir_parent(cfg.user_db_root_d)
    sirepo.auth_db.init_model(_init_model)
    sirepo.events.register(
        PKDict(
            auth_logout=_event_auth_logout,
            end_api_call=_event_end_api_call,
        ))
    # migration hook only needed when users may come from jupyter.radiasoft.org
    if cfg.rs_jupyter_migrate:
        sirepo.events.register(
            PKDict(github_authorized=_event_github_authorized,
        ))
async def _do_agent_start(self, op):
    """Start the agent inside a docker container.

    Builds the ``docker run`` argument list, launches the container, and
    records the container id and host in driver_details.
    """
    cmd, stdin, env = self._agent_cmd_stdin_env(cwd=self._agent_exec_dir)
    pkdlog('{} agent_exec_dir={}', self, self._agent_exec_dir)
    pkio.mkdir_parent(self._agent_exec_dir)
    c = self.cfg[self.kind]
    p = (
        'run',
        # attach to stdin for writing
        '--attach=stdin',
        '--init',
        # keeps stdin open so we can write to it
        '--interactive',
        '--name={}'.format(self._cname),
        '--network=host',
        # remove the container when it exits
        '--rm',
        # no core dumps from agents
        '--ulimit=core=0',
        '--ulimit=nofile={}'.format(_MAX_OPEN_FILES),
        # do not use a "name", but a uid, because /etc/password is image specific, but
        # IDs are universal.
        '--user={}'.format(os.getuid()),
    ) + self._constrain_resources(c) + self._volumes() + (self._image, )
    self._cid = await self._cmd(p + cmd, stdin=stdin, env=env)
    self.driver_details.pkupdate(host=self.host.name)
    # log only the first 12 chars of the container id, like docker does
    pkdlog('{} cname={} cid={:.12}', self, self._cname, self._cid)
def _write_ct_vti_file(files, prefix): ctinfo = None #instance_numbers = sorted(files.ctmap.keys()) if files.position == 'HFS' else reversed(sorted(files.ctmap.keys())) instance_numbers = sorted(files.ctmap.keys()) first = pydicom.dcmread(files.ctmap[instance_numbers[0]]) if first.ImagePositionPatient[2] \ > pydicom.dcmread(files.ctmap[instance_numbers[-1]]).ImagePositionPatient[2]: instance_numbers = reversed(instance_numbers) is_flipped_lr = first.ImageOrientationPatient[0] == -1 for idx in instance_numbers: frame = pydicom.dcmread(files.ctmap[idx]) ctinfo = _extract_dcm_info(files, ctinfo, frame) pkio.mkdir_parent(_PIXEL_DATA_DIR) with open(_PIXEL_DATA_FILE, 'ab') as f: pixels = frame.pixel_array if is_flipped_lr: pixels = np.fliplr(pixels) #pixels = pixels.astype(np.uint16) pixels.tofile(f) origin = ctinfo.ImagePositionPatient if is_flipped_lr: origin[0] = first.ImagePositionPatient[0] - first.PixelSpacing[0] * (first.Columns - 1) _write_vti_file(_VTI_CT_ZIP_FILE, ctinfo, prefix, origin) return ctinfo
def _subprocess_setup(request, cfg=None, uwsgi=False):
    """setup the supervisor

    Builds environment/config for a test job supervisor and returns the
    environment plus a client (uwsgi or flask).

    Args:
        request: pytest request fixture; module name selects sbatch mode
        cfg (PKDict): extra config merged into the environment
        uwsgi (bool): if True, use a UwsgiClient instead of flask_client

    Returns:
        tuple: (env PKDict, client)
    """
    import os
    from pykern.pkcollections import PKDict
    sbatch_module = 'sbatch' in request.module.__name__
    env = PKDict(os.environ)
    if not cfg:
        cfg = PKDict()
    i = '127.0.0.1'
    from pykern import pkunit
    from pykern import pkio
    # different port than default so can run tests when supervisor running
    p = '8101'
    cfg.pkupdate(
        PYKERN_PKDEBUG_WANT_PID_TIME='1',
        SIREPO_PKCLI_JOB_SUPERVISOR_IP=i,
        SIREPO_PKCLI_JOB_SUPERVISOR_PORT=p,
        SIREPO_SRDB_ROOT=str(pkio.mkdir_parent(pkunit.work_dir().join('db'))),
    )
    if uwsgi:
        cfg.SIREPO_PKCLI_SERVICE_PORT = '8102'
        cfg.SIREPO_PKCLI_SERVICE_NGINX_PROXY_PORT = '8180'
    # every driver kind points at the same supervisor URI
    for x in 'DRIVER_LOCAL', 'DRIVER_DOCKER', 'API', 'DRIVER_SBATCH':
        cfg['SIREPO_JOB_{}_SUPERVISOR_URI'.format(x)] = 'http://{}:{}'.format(i, p)
    if sbatch_module:
        cfg.pkupdate(SIREPO_SIMULATION_DB_SBATCH_DISPLAY='testing@123')
    env.pkupdate(**cfg)
    import sirepo.srunit
    c = None
    if uwsgi:
        c = sirepo.srunit.UwsgiClient(env)
    else:
        c = sirepo.srunit.flask_client(
            cfg=cfg,
            job_run_mode='sbatch' if sbatch_module else None,
        )
    if sbatch_module:
        # must be performed after fc initialized so work_dir is configured
        _config_sbatch_supervisor_env(env)
    _job_supervisor_check(env)
    return (env, c)
def flask_client(cfg=None, sim_types=None, job_run_mode=None):
    """Return FlaskClient with easy access methods.

    Creates a new run directory every test file so can assume
    sharing of state on the server within a file (module).

    Two methods of interest: `sr_post` and `sr_get`.

    Args:
        cfg (dict): extra configuration for reset_state_for_testing
        sim_types (str): value for SIREPO_FEATURE_CONFIG_SIM_TYPES [CONFTEST_ALL_CODES]
        job_run_mode (str): passed through to the test client

    Returns:
        FlaskClient: for local requests to Flask server
    """
    global server, app
    a = 'srunit_flask_client'
    if not cfg:
        cfg = PKDict()
    t = sim_types or CONFTEST_ALL_CODES
    if t:
        # accept either a sequence of types or a pre-joined ':' string
        if isinstance(t, (tuple, list)):
            t = ':'.join(t)
        cfg['SIREPO_FEATURE_CONFIG_SIM_TYPES'] = t
    if not (server and hasattr(app, a)):
        from pykern import pkconfig
        # initialize pkdebug with correct values
        pkconfig.reset_state_for_testing(cfg)
        from pykern import pkunit
        with pkunit.save_chdir_work() as wd:
            from pykern import pkio
            cfg['SIREPO_SRDB_ROOT'] = str(pkio.mkdir_parent(wd.join('db')))
            # reset again now that SIREPO_SRDB_ROOT is known
            pkconfig.reset_state_for_testing(cfg)
            from sirepo import server as s
            server = s
            app = server.init()
            app.config['TESTING'] = True
            app.test_client_class = _TestClient
            setattr(app, a, app.test_client(job_run_mode=job_run_mode))
    return getattr(app, a)
def flask_client():
    """Return FlaskClient with easy access methods.

    Creates a new run directory every test file so can assume
    sharing of state on the server within a file (module).

    Two methods of interest: `sr_post` and `sr_get`.

    Returns:
        FlaskClient: for local requests to Flask server
    """
    attr = 'sr_unit_flask_client'
    if not hasattr(server.app, attr):
        with pkio.save_chdir(pkunit.work_dir()):
            db = pkio.mkdir_parent('db')
            server.app.config['TESTING'] = True
            server.app.test_client_class = _TestClient
            server.init(db)
            # cache the client on the app so subsequent calls reuse it
            setattr(server.app, attr, server.app.test_client())
    return getattr(server.app, attr)
def _cfg_db_dir(value):
    """Config value or root package's parent or cwd with _DEFAULT_SUBDIR"""
    from pykern import pkinspect
    if value:
        assert os.path.isabs(value), \
            '{}: SIREPO_SERVER_DB_DIR must be absolute'.format(value)
        assert os.path.isdir(value), \
            '{}: SIREPO_SERVER_DB_DIR must be a directory and exist'.format(value)
        return py.path.local(value)
    assert pkconfig.channel_in('dev'), \
        'SIREPO_SERVER_DB_DIR must be configured except in DEV'
    fn = sys.modules[pkinspect.root_package(_cfg_db_dir)].__file__
    root = py.path.local(py.path.local(py.path.local(fn).dirname).dirname)
    # Check to see if we are in our dev directory. This is a hack,
    # but should be reliable.
    if not root.join('requirements.txt').check():
        # Don't run from an install directory
        root = py.path.local('.')
    return pkio.mkdir_parent(root.join(_DEFAULT_DB_SUBDIR))
def _proprietary_codes():
    """Get proprietary RPMs and put them in the proprietary code dir.

    Downloads ``rscode-<sim_type>-dev.rpm`` from ``cfg.proprietary_code_uri``
    for each configured proprietary sim type whose code dir does not exist.
    (Docstring previously documented a ``uri`` arg that does not exist.)
    """
    import urllib.request
    import sirepo.feature_config
    import sirepo.srdb
    import sirepo.sim_data
    for s in sirepo.feature_config.cfg().proprietary_sim_types:
        d = sirepo.srdb.proprietary_code_dir(s)
        if d.exists():
            # already installed; skip the download
            continue
        urllib.request.urlretrieve(
            # POSIT: download/installers/rpm-code/dev-build.sh
            f'{cfg.proprietary_code_uri}/rscode-{s}-dev.rpm',
            pkio.mkdir_parent(d).join(
                sirepo.sim_data.get_class(s).proprietary_code_rpm(),
            ),
        )
def _init_root():
    """Initialize cfg, validate the configured root, and set module ``_root``.

    Returns:
        py.path: the database root directory
    """
    global cfg, _root
    cfg = pkconfig.init(root=(None, _cfg_root, 'where database resides'),)
    root = cfg.root
    if root:
        assert os.path.isabs(root), \
            '{}: SIREPO_SRDB_ROOT must be absolute'.format(root)
        assert os.path.isdir(root), \
            '{}: SIREPO_SRDB_ROOT must be a directory and exist'.format(root)
        root = pkio.py_path(root)
    else:
        assert pkconfig.channel_in('dev'), \
            'SIREPO_SRDB_ROOT must be configured except in DEV'
        fn = sys.modules[pkinspect.root_package(_init_root)].__file__
        base = pkio.py_path(pkio.py_path(pkio.py_path(fn).dirname).dirname)
        # Check to see if we are in our dev directory. This is a hack,
        # but should be reliable.
        if not base.join('requirements.txt').check():
            # Don't run from an install directory
            base = pkio.py_path('.')
        root = pkio.mkdir_parent(base.join(_DEFAULT_ROOT))
    _root = root
    return root
def test_unchecked_remove():
    """Also tests mkdir_parent"""
    with pkunit.save_chdir_work():
        name = 'f1'
        # Should not throw an exception
        pkio.unchecked_remove(name)
        pkio.write_text(name, 'hello')
        pkio.unchecked_remove(name)
        assert not os.path.exists(name), \
            'When file removed, should be gone'
        for d in ('d1', 'd2/d3'):
            assert py.path.local(d) == pkio.mkdir_parent(d), \
                'When mkdir_parent is called, returns path passed in'
        assert os.path.exists('d1'), \
            'When single directory, should exist'
        assert os.path.exists('d2/d3'), \
            'When nested directory, should exist'
        # removing the cwd, its absolute path, or root must all be rejected
        for protected in ('.', os.getcwd(), '/'):
            with pytest.raises(AssertionError):
                pkio.unchecked_remove(protected)
def auth_fc_module(request):
    """Pytest fixture body: a flask client configured for auth tests.

    Enables github as an active auth method for email3_test, otherwise as a
    deprecated method. Yields the client; cleans up any supervisor process.
    """
    import sirepo.srunit
    from pykern.pkcollections import PKDict
    cfg = PKDict(
        SIREPO_AUTH_BASIC_PASSWORD='******',
        SIREPO_AUTH_BASIC_UID='dev-no-validate',
        SIREPO_AUTH_EMAIL_FROM_EMAIL='x',
        SIREPO_AUTH_EMAIL_FROM_NAME='x',
        SIREPO_AUTH_EMAIL_SMTP_PASSWORD='******',
        SIREPO_AUTH_EMAIL_SMTP_SERVER='dev',
        SIREPO_AUTH_EMAIL_SMTP_USER='******',
        SIREPO_AUTH_GITHUB_CALLBACK_URI='/uri',
        SIREPO_AUTH_GITHUB_KEY='key',
        SIREPO_AUTH_GITHUB_SECRET='secret',
        SIREPO_AUTH_GUEST_EXPIRY_DAYS='1',
        SIREPO_AUTH_METHODS='basic:email:guest',
        SIREPO_FEATURE_CONFIG_API_MODULES='status',
    )
    if 'email3_test' in str(request.fspath):
        cfg.SIREPO_AUTH_METHODS += ':github'
    else:
        cfg.SIREPO_AUTH_DEPRECATED_METHODS = 'github'
    from pykern import pkconfig
    pkconfig.reset_state_for_testing(cfg)
    from pykern import pkunit
    from pykern import pkio
    # db lives under the test's work dir so each test file is isolated
    cfg['SIREPO_SRDB_ROOT'] = str(
        pkio.mkdir_parent(pkunit.work_dir().join('db')))
    p, fc = _job_supervisor_start(request, cfg=cfg)
    if p:
        yield fc
        # stop the supervisor process started for this fixture
        p.terminate()
        p.wait()
    else:
        yield sirepo.srunit.flask_client(cfg=cfg)
def _thumb(image, force):
    """Returns larger size

    Generates 50px and 200px JPEG thumbnails for ``image`` via ImageMagick
    ``convert``, skipping any thumbnail that already exists unless ``force``.

    Args:
        image (str): path to the source image
        force (bool): regenerate even when the thumbnail exists

    Returns:
        str: path of the larger (200px) thumbnail
    """
    for size, quality in ('50', '25'), ('200', '50'):
        # thumbnail path: <size>/<image> with the extension swapped to jpg
        t = re.sub(r'\w+$', 'jpg', os.path.join(size, image))
        if force or not os.path.exists(t):
            d = pkio.mkdir_parent(py.path.local(t).dirname)
            try:
                subprocess.check_call([
                    'convert',
                    '-thumbnail',
                    'x' + size,
                    '-quality',
                    quality + '%',
                    '-background',
                    'white',
                    '-alpha',
                    'remove',
                    # [0] selects the first frame/page of multi-frame inputs
                    image + '[0]',
                    t,
                ])
            except Exception:
                # BUG FIX: was a bare ``except:`` which also trapped
                # KeyboardInterrupt/SystemExit; log the target dir, re-raise
                pkdp('dir={}', d)
                raise
    return t
def _create_lib_and_examples(simulation_type):
    """Make the lib directory and save every example simulation."""
    import sirepo.sim_data
    pkio.mkdir_parent(simulation_lib_dir(simulation_type))
    for example in examples(simulation_type):
        save_new_example(example)
def _run_dir():
    """Return cfg.run_dir as a py.path, creating it (or a db_dir default) lazily."""
    from sirepo import server
    if not isinstance(cfg.run_dir, type(py.path.local())):
        # first call: resolve the configured value or fall back to db_dir
        if cfg.run_dir:
            cfg.run_dir = pkio.mkdir_parent(cfg.run_dir)
        else:
            cfg.run_dir = server.cfg.db_dir.new()
    return cfg.run_dir
def _run_dir():
    """Ensure the configured run directory exists and return it."""
    d = cfg.run_dir
    return pkio.mkdir_parent(d)
def _db_dir():
    """Ensure the configured db directory exists and return it."""
    d = cfg.db_dir
    return pkio.mkdir_parent(d)
def copy_animation_file(source_path, target_path):
    """Copy the multi-electron animation output file, if present, to the target."""
    src = str(
        py.path.local(source_path).join('animation', _MULTI_ELECTRON_FILENAME))
    if not os.path.isfile(src):
        return
    pkio.mkdir_parent(str(py.path.local(target_path).join('animation')))
    dst = str(
        py.path.local(target_path).join('animation', _MULTI_ELECTRON_FILENAME))
    shutil.copyfile(src, dst)