def test_save_chdir():
    import os
    import py
    import pytest
    from pykern import pkunit
    from pykern import pkio

    expect_prev = py.path.local().realpath()
    expect_new = py.path.local('..').realpath()
    try:
        with pkio.save_chdir(expect_new) as new:
            assert expect_new == new, \
                'save_chdir returns current directory before chdir'
            assert expect_new == py.path.local().realpath(), \
                'When in save_chdir, expect current directory to be new directory'
            os.chdir('..')
            assert expect_new != py.path.local().realpath(), \
                'When in save_chdir, expect chdir to still work'
            raise IndentationError()
    except IndentationError as benign_exception:
        pass
    assert expect_prev == py.path.local().realpath(), \
        'When exception is raised, current directory should be reverted.'
    expect_new = pkunit.empty_work_dir().join('new_folder').realpath()
    with pytest.raises(OSError):
        with pkio.save_chdir(expect_new) as new:
            assert False, \
                'When save_chdir given non-existent dir, should throw exception'
    with pkio.save_chdir(expect_new, mkdir=True) as new:
        assert expect_new == py.path.local().realpath(), \
            'When save_chdir given non-existent dir and mkdir=True, should pass'
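# Hedged minimal sketch (not part of the source): the basic pkio.save_chdir
# behavior the test above exercises - change into a directory for the duration
# of a with block and restore the previous directory afterwards, even on error.
# The directory name 'example_subdir' is an assumption for illustration only.
def example_save_chdir_basic():
    import py
    from pykern import pkio

    before = py.path.local().realpath()
    # mkdir=True creates 'example_subdir' if it does not already exist
    with pkio.save_chdir('example_subdir', mkdir=True) as d:
        assert py.path.local().realpath() == d
    assert py.path.local().realpath() == before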
def flask_client(cfg=None):
    """Return FlaskClient with easy access methods.

    Creates a new run directory every test file so can assume
    sharing of state on the server within a file (module).

    Two methods of interest: `sr_post` and `sr_get`.

    Args:
        cfg (dict): extra configuration for reset_state_for_testing

    Returns:
        FlaskClient: for local requests to Flask server
    """
    global server

    a = 'srunit_flask_client'
    if not cfg:
        cfg = {}
    wd = pkunit.work_dir()
    cfg['SIREPO_SERVER_DB_DIR'] = str(pkio.mkdir_parent(wd.join('db')))
    if not (server and hasattr(server.app, a)):
        with pkio.save_chdir(wd):
            pkconfig.reset_state_for_testing(cfg)
            from sirepo import server as s
            server = s
            server.app.config['TESTING'] = True
            server.app.test_client_class = _TestClient
            server.init()
            setattr(server.app, a, server.app.test_client())
    return getattr(server.app, a)
def run_background(cfg_dir):
    with pkio.save_chdir(cfg_dir):
        # N_steps and step() are expected to be defined by the exec'd warp script
        exec(pkio.read_text('warp_parameters.py'), locals(), locals())
        n_stepped = 0
        while n_stepped < N_steps:
            step(10)
            n_stepped = n_stepped + 10
def flask_client(cfg=None, sim_types=None):
    """Return FlaskClient with easy access methods.

    Creates a new run directory every test file so can assume
    sharing of state on the server within a file (module).

    Two methods of interest: `sr_post` and `sr_get`.

    Args:
        cfg (dict): extra configuration for reset_state_for_testing
        sim_types (str): value for SIREPO_FEATURE_CONFIG_SIM_TYPES

    Returns:
        FlaskClient: for local requests to Flask server
    """
    global server, app

    a = 'srunit_flask_client'
    if not cfg:
        cfg = {}
    if sim_types:
        cfg['SIREPO_FEATURE_CONFIG_SIM_TYPES'] = sim_types
    wd = pkunit.work_dir()
    cfg['SIREPO_SRDB_ROOT'] = str(pkio.mkdir_parent(wd.join('db')))
    if not (server and hasattr(app, a)):
        with pkio.save_chdir(wd):
            pkconfig.reset_state_for_testing(cfg)
            from sirepo import server as s
            server = s
            app = server.init()
            app.config['TESTING'] = True
            app.test_client_class = _TestClient
            setattr(app, a, app.test_client())
    return getattr(app, a)
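# Hedged usage sketch (not part of the source): how a Sirepo test might use the
# flask_client() helper above. Only flask_client() and the sr_get/sr_post helpers
# mentioned in its docstring are taken from the source; the route name and payload
# below are illustrative assumptions, not a documented API.
def test_flask_client_usage_example():
    fc = flask_client(sim_types='myapp')
    # sr_post wraps FlaskClient.post and decodes the JSON response;
    # 'listSimulations' and its payload are assumptions for illustration only.
    sims = fc.sr_post('listSimulations', {'simulationType': 'myapp'})
    assert isinstance(sims, (list, dict))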
def _create_zip(sim_type, sim_id, want_python):
    """Zip up the json file and its dependencies

    Args:
        sim_type (str): simulation type
        sim_id (str): simulation id
        want_python (bool): include template's python source?

    Returns:
        (py.path.local, dict): zip file and simulation data
    """
    from pykern import pkio
    from sirepo import simulation_db
    from sirepo.template import template_common

    #TODO(robnagler) need a lock
    with pkio.save_chdir(simulation_db.tmp_dir()):
        res = py.path.local(sim_id + '.zip')
        data = simulation_db.open_json_file(sim_type, sid=sim_id)
        if 'report' in data:
            del data['report']
        files = template_common.lib_files(data)
        files.insert(0, simulation_db.sim_data_file(data.simulationType, sim_id))
        if want_python:
            files.append(_python(data))
        with zipfile.ZipFile(
            str(res),
            mode='w',
            compression=zipfile.ZIP_DEFLATED,
            allowZip64=True,
        ) as z:
            for f in files:
                z.write(str(f), f.basename)
        return res, data
def nginx_proxy():
    """Starts nginx in container.

    Used for development only.
    """
    import sirepo.feature_config

    assert pkconfig.channel_in('dev')
    run_dir = _run_dir().join('nginx_proxy').ensure(dir=True)
    with pkio.save_chdir(run_dir) as d:
        f = run_dir.join('default.conf')
        c = PKDict(_cfg()).pkupdate(run_dir=str(d))
        if 'jupyterhublogin' in sirepo.feature_config.cfg().sim_types:
            import sirepo.sim_api.jupyterhublogin
            import sirepo.server

            sirepo.server.init()
            c.pkupdate(
                jupyterhub_root=sirepo.sim_api.jupyterhublogin.cfg.uri_root,
            )
        pkjinja.render_resource('nginx_proxy.conf', c, output=f)
        cmd = [
            'nginx',
            '-c',
            str(f),
        ]
        pksubprocess.check_call_with_signals(cmd)
def run_background(cfg_dir):
    with pkio.save_chdir(cfg_dir):
        # N_steps and step() are expected to be defined by the exec'd warp script
        exec(pkio.read_text('warp_parameters.py'), locals(), locals())
        n_stepped = 0
        while n_stepped < N_steps:
            step(10)
            n_stepped = n_stepped + 10
def http():
    """Starts Flask server"""
    from sirepo import server

    db_dir = _db_dir()
    with pkio.save_chdir(_run_dir()):
        server.init(db_dir)
        server.app.run(host=cfg.ip, port=cfg.port, debug=1, threaded=True)
def run(cfg_dir):
    with pkio.save_chdir(cfg_dir):
        exec(pkio.read_text(template_common.PARAMETERS_PYTHON_FILE), locals(), locals())
        data = simulation_db.read_json(template_common.INPUT_BASE_NAME)
        if data['report'] == 'dogReport':
            dog = data.models.dog
            max_age = _max_age(dog.weight)
            x = np.linspace(0, max_age, int(max_age) + 1).tolist()
            plots = [
                _plot(dog, 'height', x),
                _plot(dog, 'weight', x),
            ]
            res = {
                'title': 'Dog Height and Weight Over Time',
                'x_range': [0, max_age],
                'y_label': '',
                'x_label': 'Age (years)',
                'x_points': x,
                'plots': plots,
                'y_range': template_common.compute_plot_color_and_range(plots),
            }
        else:
            raise RuntimeError('unknown report: {}'.format(data['report']))
        simulation_db.write_result(res)
def _create_zip(sim_type, sim_id, want_python):
    """Zip up the json file and its dependencies

    Args:
        sim_type (str): simulation type
        sim_id (str): simulation id
        want_python (bool): include template's python source?

    Returns:
        (py.path.local, dict): zip file and simulation data
    """
    from pykern import pkio
    from sirepo import simulation_db
    from sirepo.template import template_common

    #TODO(robnagler) need a lock
    with pkio.save_chdir(simulation_db.tmp_dir()):
        res = py.path.local(sim_id + '.zip')
        data = simulation_db.open_json_file(sim_type, sid=sim_id)
        files = template_common.lib_files(data)
        files.insert(0, simulation_db.sim_data_file(data.simulationType, sim_id))
        if want_python:
            files.append(_python(data))
        with zipfile.ZipFile(
            str(res),
            mode='w',
            compression=zipfile.ZIP_DEFLATED,
            allowZip64=True,
        ) as z:
            for f in files:
                z.write(str(f), f.basename)
        return res, data
def run(cfg_dir):
    """Run code in ``cfg_dir``

    Args:
        cfg_dir (str): directory to run code in
    """
    template = sirepo.template.import_module(pkinspect.module_basename(run))
    with pkio.save_chdir(cfg_dir):
        _run_code()
        data = simulation_db.read_json(template_common.INPUT_BASE_NAME)
        data_file = template.open_data_file(py.path.local())
        model = data['models'][data['report']]
        if data['report'] == 'laserPreviewReport':
            field = model['field']
            coordinate = model['coordinate']
            mode = model['mode']
            if mode != 'all':
                mode = int(mode)
            res = template.extract_field_report(field, coordinate, mode, data_file)
        elif data['report'] == 'beamPreviewReport':
            res = template.extract_particle_report(
                model,
                'beam',
                cfg_dir,
                data_file,
            )
        simulation_db.write_result(res)
def run_background(cfg_dir):
    res = {}
    data = simulation_db.read_json(template_common.INPUT_BASE_NAME)
    distribution = data['models']['bunch']['distribution']
    run_with_mpi = distribution == 'lattice' or distribution == 'file'
    try:
        with pkio.save_chdir(cfg_dir):
            if run_with_mpi:
                mpi.run_script(pkio.read_text(template_common.PARAMETERS_PYTHON_FILE))
            else:
                #TODO(pjm): MPI doesn't work with rsbeams distributions yet
                exec(pkio.read_text(template_common.PARAMETERS_PYTHON_FILE), locals(), locals())
    except Exception as e:
        res = {
            'error': str(e),
        }
    if run_with_mpi and 'error' in res:
        text = pkio.read_text('mpi_run.out')
        m = re.search(r'^Traceback .*?^\w*Error: (.*?)\n\n', text, re.MULTILINE | re.DOTALL)
        if m:
            res['error'] = m.group(1)
        # remove output file - write_result() will not overwrite an existing error output
        pkio.unchecked_remove(
            simulation_db.json_filename(template_common.OUTPUT_BASE_NAME))
    simulation_db.write_result(res)
def run(cfg_dir):
    """Run shadow in ``cfg_dir``

    Args:
        cfg_dir (str): directory to run shadow in
    """
    with pkio.save_chdir(cfg_dir):
        beam = _run_shadow()
        data = simulation_db.read_json(template_common.INPUT_BASE_NAME)
        model = data['models'][data['report']]
        column_values = _SCHEMA['enum']['ColumnValue']

        if 'y' in model:
            x_range = None
            y_range = None
            if model['overrideSize'] == '1':
                x_range = (np.array([
                    model['horizontalOffset'] - model['horizontalSize'] / 2,
                    model['horizontalOffset'] + model['horizontalSize'] / 2,
                ]) * _MM_TO_CM).tolist()
                y_range = (np.array([
                    model['verticalOffset'] - model['verticalSize'] / 2,
                    model['verticalOffset'] + model['verticalSize'] / 2,
                ]) * _MM_TO_CM).tolist()
            ticket = beam.histo2(int(model['x']), int(model['y']), nbins=template_common.histogram_bins(model['histogramBins']), ref=int(model['weight']), nolost=1, calculate_widths=0, xrange=x_range, yrange=y_range)
            _scale_ticket(ticket)
            res = {
                'x_range': [ticket['xrange'][0], ticket['xrange'][1], ticket['nbins_h']],
                'y_range': [ticket['yrange'][0], ticket['yrange'][1], ticket['nbins_v']],
                'x_label': _label_with_units(model['x'], column_values),
                'y_label': _label_with_units(model['y'], column_values),
                'z_label': 'Frequency',
                'title': u'{}, {}'.format(_label(model['x'], column_values), _label(model['y'], column_values)),
                'z_matrix': ticket['histogram'].T.tolist(),
                'frameCount': 1,
            }
        else:
            weight = int(model['weight'])
            ticket = beam.histo1(int(model['column']), nbins=template_common.histogram_bins(model['histogramBins']), ref=weight, nolost=1, calculate_widths=0)
            _scale_ticket(ticket)
            res = {
                'title': _label(model['column'], column_values),
                'x_range': [ticket['xrange'][0], ticket['xrange'][1], ticket['nbins']],
                'y_label': u'{}{}'.format(
                    'Number of Rays',
                    u' weighted by {}'.format(_label_for_weight(model['weight'], column_values)) if weight else ''),
                'x_label': _label_with_units(model['column'], column_values),
                'points': ticket['histogram'].T.tolist(),
                'frameCount': 1,
            }
            #pkdp('range amount: {}', res['x_range'][1] - res['x_range'][0])
            #1.55431223448e-15
            dist = res['x_range'][1] - res['x_range'][0]
            #TODO(pjm): only rebalance range if outside of 0
            if dist < 1e-14:
                #TODO(pjm): include offset range for client
                res['x_range'][0] = 0
                res['x_range'][1] = dist
        simulation_db.write_result(res)
def save_chdir_work():
    """Create empty work_dir and chdir

    Returns:
        py.path.local: empty work directory
    """
    return pkio.save_chdir(empty_work_dir())
def run(cfg_dir):
    """Run srw in ``cfg_dir``

    Args:
        cfg_dir (str): directory to run srw in
    """
    with pkio.save_chdir(cfg_dir):
        _run_srw()
def import_python(code, tmp_dir, user_filename=None, arguments=None):
    """Converts script_text into json and stores as new simulation.

    Avoids too much data back to the user in the event of an error.
    This could be a potential security issue, because the script
    could be used to probe the system.

    Args:
        code (str): Python code that runs SRW
        tmp_dir (str): temporary directory in which to run the parser
        user_filename (str): uploaded file name for log
        arguments (str): argv to be passed to script

    Returns:
        dict: simulation data
    """
    script = None

    # Patch for the mirror profile for the exported .py file from Sirepo:
    code = _patch_mirror_profile(code)

    try:
        with pkio.save_chdir(tmp_dir):
            # This string won't show up anywhere
            script = pkio.write_text(
                'in.py',
                re.sub(r'^main\(', '#', code, flags=re.MULTILINE),
            )
            o = SRWParser(
                script,
                user_filename=user_filename,
                arguments=arguments,
            )
            return o.data
    except Exception as e:
        lineno = script and _find_line_in_trace(script)
        if hasattr(e, 'args'):
            if len(e.args) == 1:
                m = str(e.args[0])
            elif e.args:
                m = str(e.args)
            else:
                m = e.__class__.__name__
        else:
            m = str(e)
        pkdlog(
            'Error: {}; exception={}; script={}; filename={}; stack:\n{}',
            m,
            e.__class__.__name__,
            script,
            user_filename,
            pkdexc(),
        )
        m = m[:50]
        raise ValueError(
            'Error on line {}: {}'.format(lineno, m)
            if lineno else 'Error: {}'.format(m),
        )
def _run_jspec(run_dir):
    with pkio.save_chdir(run_dir):
        data = simulation_db.read_json(template_common.INPUT_BASE_NAME)
        _elegant_to_madx(data['models']['ring'])
        # jspec_file is expected to be defined by the exec'd parameters script
        exec(pkio.read_text(template_common.PARAMETERS_PYTHON_FILE), locals(), locals())
        jspec_filename = template.JSPEC_INPUT_FILENAME
        pkio.write_text(jspec_filename, jspec_file)
        pksubprocess.check_call_with_signals(['jspec', jspec_filename], msg=pkdp, output=template.JSPEC_LOG_FILE)
        return pkio.read_text(template.JSPEC_LOG_FILE)
def _run_hellweg(cfg_dir):
    with pkio.save_chdir(cfg_dir):
        # input_file and ini_file are expected to be defined by the exec'd parameters script
        exec(pkio.read_text(template_common.PARAMETERS_PYTHON_FILE), locals(), locals())
        pkio.write_text(template.HELLWEG_INPUT_FILE, input_file)
        pkio.write_text(template.HELLWEG_INI_FILE, ini_file)
        solver = BeamSolver(template.HELLWEG_INI_FILE, template.HELLWEG_INPUT_FILE)
        solver.solve()
        solver.save_output(template.HELLWEG_SUMMARY_FILE)
        solver.dump_bin(template.HELLWEG_DUMP_FILE)
def test_is_caller_main():
    m1 = pkunit.import_module_from_data_dir('p1.m1')
    assert not m1.is_caller_main(), \
        'When not called from main, is_caller_main is False'
    with pkio.save_chdir(pkunit.data_dir()):
        subprocess.check_call([
            sys.executable,
            '-c',
            'from p1 import m1; assert m1.is_caller_main()'])
def run_background(cfg_dir):
    """Run code in ``cfg_dir`` with mpi

    Args:
        cfg_dir (str): directory to run code in
    """
    with pkio.save_chdir(cfg_dir):
        mpi.run_script(_script())
        simulation_db.write_result({})
def run_background(cfg_dir):
    """Run srw with mpi in ``cfg_dir``

    Args:
        cfg_dir (str): directory to run srw in
    """
    with pkio.save_chdir(cfg_dir):
        mpi.run_script(pkio.read_text(template_common.PARAMETERS_PYTHON_FILE))
        simulation_db.write_result({})
def run_background(cfg_dir):
    """Run elegant as a background task

    Args:
        cfg_dir (str): directory to run elegant in
    """
    with pkio.save_chdir(cfg_dir):
        _run_elegant(with_mpi=True)
        simulation_db.write_result({})
def run_background(cfg_dir):
    """Run elegant as a background task

    Args:
        cfg_dir (str): directory to run elegant in
    """
    with pkio.save_chdir(cfg_dir):
        _run_elegant(with_mpi=True)
        simulation_db.write_result({})
def test_is_caller_main():
    m1 = pkunit.import_module_from_data_dir('p1.m1')
    assert not m1.is_caller_main(), \
        'When not called from main, is_caller_main is False'
    with pkio.save_chdir(pkunit.data_dir()):
        subprocess.check_call([
            sys.executable,
            '-c',
            'from p1 import m1; assert m1.is_caller_main()',
        ])
def _run_tunes_report(cfg_dir, data):
    with pkio.save_chdir(cfg_dir):
        # tunes_file is expected to be defined by the exec'd parameters script
        exec(pkio.read_text(template_common.PARAMETERS_PYTHON_FILE), locals(), locals())
        pkio.write_text(template.TUNES_INPUT_FILE, tunes_file)
        #TODO(pjm): uses datafile from animation directory
        os.symlink('../animation/zgoubi.fai', 'zgoubi.fai')
        subprocess.call([_TUNES_PATH])
        simulation_db.write_result(template.extract_tunes_report(cfg_dir, data))
def run(cfg_dir):
    """Run elegant in ``cfg_dir``

    The files in ``cfg_dir`` must be configured properly.

    Args:
        cfg_dir (str): directory to run elegant in
    """
    with pkio.save_chdir(cfg_dir):
        _run_elegant()
def run(cfg_dir):
    """Run warp in ``cfg_dir``

    The files in ``cfg_dir`` must be configured properly.

    Args:
        cfg_dir (str): directory to run warp in
    """
    with pkio.save_chdir(cfg_dir):
        _run_warp()
def run_background(cfg_dir):
    """Run warpvnd in ``cfg_dir`` with mpi

    Args:
        cfg_dir (str): directory to run warpvnd in
    """
    with pkio.save_chdir(cfg_dir):
        #TODO(pjm): disable running with MPI for now
        # mpi.run_script(_script())
        exec(_script(), locals(), locals())
        simulation_db.write_result({})
def run(cfg_dir):
    """Run elegant in ``cfg_dir``

    The files in ``cfg_dir`` must be configured properly.

    Args:
        cfg_dir (str): directory to run elegant in
    """
    with pkio.save_chdir(cfg_dir):
        _run_elegant(bunch_report=True)
        _extract_bunch_report()
def _run_hellweg(cfg_dir):
    with pkio.save_chdir(cfg_dir):
        # input_file and ini_file are expected to be defined by the exec'd parameters script
        exec(pkio.read_text(template_common.PARAMETERS_PYTHON_FILE), locals(), locals())
        pkio.write_text(template.HELLWEG_INPUT_FILE, input_file)
        pkio.write_text(template.HELLWEG_INI_FILE, ini_file)
        s = solver.BeamSolver(template.HELLWEG_INI_FILE, template.HELLWEG_INPUT_FILE)
        s.solve()
        s.save_output(template.HELLWEG_SUMMARY_FILE)
        s.dump_bin(template.HELLWEG_DUMP_FILE)
def run_background(cfg_dir):
    res = {}
    try:
        with pkio.save_chdir(cfg_dir):
            exec(pkio.read_text(template_common.PARAMETERS_PYTHON_FILE), locals(), locals())
    except Exception as e:
        res = {
            'error': str(e),
        }
    simulation_db.write_result(res)
def save_chdir_work(is_pkunit_prefix=False):
    """Create empty work_dir and chdir

    Args:
        is_pkunit_prefix (bool): use as root of (most) file I/O (optional)

    Returns:
        py.path.local: empty work directory
    """
    return pkio.save_chdir(empty_work_dir(), is_pkunit_prefix=is_pkunit_prefix)
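# Hedged usage sketch (not part of the source): pkunit.save_chdir_work() is
# normally used as a context manager in a test so that every file the test
# writes lands in a fresh, empty work directory that is restored afterwards.
# The file name 'hello.txt' is an assumption for illustration only.
def test_save_chdir_work_usage_example():
    from pykern import pkio
    from pykern import pkunit

    with pkunit.save_chdir_work() as d:
        # the current directory is now the empty work directory d
        pkio.write_text('hello.txt', 'hi')
        assert d.join('hello.txt').check(file=True)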
def flower():
    """Start flower"""
    assert pkconfig.channel_in('dev')
    run_dir = _run_dir().join('flower').ensure(dir=True)
    with pkio.save_chdir(run_dir):
        command.FlowerCommand().execute_from_commandline([
            'flower',
            '--address=' + cfg.ip,
            '--app=sirepo.celery_tasks',
            '--no-color',
            '--persistent',
        ])
def __init__(self):
    # POSIT: timestamps are sorted in _clone()
    self._date_d = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
    with pkio.save_chdir(self._date_d, mkdir=True):
        sleep = 0
        for r in self._iter_subscriptions():
            if sleep:
                time.sleep(sleep)
            else:
                sleep = cfg.api_pause_seconds
            pkdlog('{}: begin', r.full_name)
            self._repo(r)
    self._purge()
def _run_dose_calculation(data, cfg_dir):
    if not feature_config.cfg().rs4pi_dose_calc:
        dicom_dose = _run_dose_calculation_fake(data, cfg_dir)
    else:
        with pkio.save_chdir(cfg_dir):
            pksubprocess.check_call_with_signals(['bash', str(cfg_dir.join(template.DOSE_CALC_SH))])
            dicom_dose = template.generate_rtdose_file(data, cfg_dir)
    data['models']['dicomDose'] = dicom_dose
    # save results into simulation input data file, this is needed for further calls to get_simulation_frame()
    simulation_db.write_json(template_common.INPUT_BASE_NAME, data)
    simulation_db.write_result({
        'dicomDose': dicom_dose,
    })
def uwsgi():
    """Starts UWSGI server"""
    run_dir = _run_dir()
    with pkio.save_chdir(run_dir):
        values = dict(pkcollections.map_items(cfg))
        values['logto'] = None if pkconfig.channel_in('dev') else str(run_dir.join('uwsgi.log'))
        # uwsgi.py must be first, because values['uwsgi_py'] referenced by uwsgi.yml
        for f in ('uwsgi.py', 'uwsgi.yml'):
            output = run_dir.join(f)
            values[f.replace('.', '_')] = str(output)
            pkjinja.render_resource(f, values, output=output)
        cmd = ['uwsgi', '--yaml=' + values['uwsgi_yml']]
        pksubprocess.check_call_with_signals(cmd)
def uwsgi():
    """Starts UWSGI server"""
    db_dir = _db_dir()
    run_dir = _run_dir()
    with pkio.save_chdir(run_dir):
        values = dict(pkcollections.map_items(cfg))
        # uwsgi.py must be first, because referenced by uwsgi.yml
        for f in ('uwsgi.py', 'uwsgi.yml'):
            output = run_dir.join(f)
            values[f.replace('.', '_')] = str(output)
            pkjinja.render_resource(f, values, output=output)
        cmd = ['uwsgi', '--yaml=' + values['uwsgi_yml']]
        subprocess.check_call(cmd)
def flower():
    """Start flower"""
    assert pkconfig.channel_in('dev')
    run_dir = _run_dir().join('flower').ensure(dir=True)
    with pkio.save_chdir(run_dir):
        from flower.command import FlowerCommand
        FlowerCommand().execute_from_commandline([
            'flower',
            '--address=' + cfg.ip,
            '--app=sirepo.celery_tasks',
            '--no-color',
            '--persistent',
        ])
def _run_dose_calculation(data, cfg_dir):
    if not feature_config.cfg.rs4pi_dose_calc:
        dicom_dose = _run_dose_calculation_fake(data, cfg_dir)
    else:
        with pkio.save_chdir(cfg_dir):
            pksubprocess.check_call_with_signals(['bash', str(cfg_dir.join(template.DOSE_CALC_SH))])
            dicom_dose = template.generate_rtdose_file(data, cfg_dir)
    data['models']['dicomDose'] = dicom_dose
    # save results into simulation input data file, this is needed for further calls to get_simulation_frame()
    simulation_db.write_json(template_common.INPUT_BASE_NAME, data)
    simulation_db.write_result({
        'dicomDose': dicom_dose,
    })
def uwsgi():
    """Starts UWSGI server"""
    db_dir = _db_dir()
    run_dir = _run_dir()
    with pkio.save_chdir(run_dir):
        values = dict(pkcollections.map_items(cfg))
        # uwsgi.py must be first, because referenced by uwsgi.yml
        for f in ('uwsgi.py', 'uwsgi.yml'):
            output = run_dir.join(f)
            values[f.replace('.', '_')] = str(output)
            pkjinja.render_resource(f, values, output=output)
        cmd = ['uwsgi', '--yaml=' + values['uwsgi_yml']]
        subprocess.check_call(cmd)
def rabbitmq():
    assert pkconfig.channel_in('dev')
    run_dir = _run_dir().join('rabbitmq').ensure(dir=True)
    with pkio.save_chdir(run_dir):
        cmd = [
            'docker',
            'run',
            '--env=RABBITMQ_NODE_IP_ADDRESS=' + cfg.ip,
            '--net=host',
            '--rm',
            '--volume={}:/var/lib/rabbitmq'.format(run_dir),
            'rabbitmq:management',
        ]
        pksubprocess.check_call_with_signals(cmd)
def run_background(cfg_dir):
    with pkio.save_chdir(cfg_dir):
        fn = 'run_background.py'
        cmd = [sys.executable or 'python', fn]
        script = pkio.read_text('srw_parameters.py')
        p = dict(pkcollections.map_items(cfg))
        if cfg.slave_processes > 1:
            cmd[0:0] = [
                'mpiexec',
                '-n',
                # SRW includes a master process so 2 really needs 3 processes
                str(cfg.slave_processes + 1),
            ]
            script += '''
from mpi4py import MPI
if MPI.COMM_WORLD.Get_rank():
    import signal
    signal.signal(signal.SIGTERM, lambda x, y: MPI.COMM_WORLD.Abort(1))
'''
        else:
            # In interactive (dev) mode, output as frequently as possible
            p['particles_per_slave'] = 1
        script += '''
import srwl_bl
v = srwl_bl.srwl_uti_parse_options(get_srw_params(), use_sys_argv=False)
source_type, mag = setup_source(v)
v.wm = True
v.wm_nm = {total_particles}
v.wm_na = {particles_per_slave}
# Number of "iterations" per save is best set to num processes
v.wm_ns = {slave_processes}
op = get_beamline_optics()
srwl_bl.SRWLBeamline(_name=v.name).calc_all(v, op)
'''.format(**p)
        pkio.write_text(fn, script)
        try:
            p = subprocess.Popen(
                cmd,
                stdin=open(os.devnull),
                stdout=open('run_background.out', 'w'),
                stderr=subprocess.STDOUT,
            )
            signal.signal(signal.SIGTERM, lambda x, y: p.terminate())
            rc = p.wait()
            if rc != 0:
                p = None
                raise RuntimeError('child terminated: retcode={}'.format(rc))
        finally:
            if p is not None:
                p.terminate()
def python_to_json(run_dir='.', in_py='in.py', out_json='out.json'):
    """Run importer in run_dir trying to import py_file

    Args:
        run_dir (str): clean directory except for in_py
        in_py (str): name of the python file in run_dir
        out_json (str): valid json matching SRW schema
    """
    import sirepo.importer

    with pkio.save_chdir(run_dir):
        out = sirepo.importer.python_to_json(in_py)
        with open(out_json, 'w') as f:
            f.write(out)
    return 'Created: {}'.format(out_json)
def celery():
    """Start celery"""
    assert pkconfig.channel_in('dev')
    import celery.bin.celery
    import sirepo.celery_tasks

    run_dir = _run_dir().join('celery').ensure(dir=True)
    with pkio.save_chdir(run_dir):
        celery.bin.celery.main(argv=[
            'celery',
            'worker',
            '--app=sirepo.celery_tasks',
            '--no-color',
            '-Ofair',
            '--queue=' + ','.join(sirepo.celery_tasks.QUEUE_NAMES),
        ])
def http():
    """Starts Flask server in http mode.

    Used for development only.
    """
    from sirepo import server

    db_dir = _db_dir()
    with pkio.save_chdir(_run_dir()):
        server.init(db_dir)
        server.app.run(
            host=cfg.ip,
            port=cfg.port,
            threaded=True,
            use_reloader=1,
        )
def run(cfg_dir):
    data = simulation_db.read_json(template_common.INPUT_BASE_NAME)
    report = data['report']
    if 'bunchReport' in report or report == 'twissReport' or report == 'twissReport2':
        try:
            with pkio.save_chdir(cfg_dir):
                exec(pkio.read_text(template_common.PARAMETERS_PYTHON_FILE), locals(), locals())
                template.save_report_data(data, py.path.local(cfg_dir))
        except Exception as e:
            res = template.parse_error_log(py.path.local(cfg_dir)) or {
                'error': str(e),
            }
            simulation_db.write_result(res)
    else:
        raise RuntimeError('unknown report: {}'.format(report))
def restore(git_txz):
    """Restores the git directory (only) from a ``<name>.git.txz`` backup into a new ``<name>`` directory"""
    m = re.search(r'(([^/]+)\.git)\.txz$', git_txz)
    if not m:
        raise ValueError(git_txz, ': does not end in .git.txz')
    git_txz = pkio.py_path(git_txz)
    d = m.group(2)
    pkdc('restore: {}', d)
    g = m.group(1)
    with pkio.save_chdir(d, mkdir=True):
        _shell(['tar', 'xJf', str(git_txz)])
        os.rename(g, '.git')
        _shell(['git', 'config', 'core.bare', 'false'])
        _shell(['git', 'config', 'core.logallrefupdates', 'true'])
        _shell(['git', 'checkout'])
def import_python(code, tmp_dir, lib_dir, user_filename=None, arguments=None):
    """Converts script_text into json and stores as new simulation.

    Avoids too much data back to the user in the event of an error.
    This could be a potential security issue, because the script
    could be used to probe the system.

    Args:
        code (str): Python code that runs SRW
        tmp_dir (str): temporary directory in which to run the parser
        lib_dir (str): simulation library directory
        user_filename (str): uploaded file name for log
        arguments (str): argv to be passed to script

    Returns:
        dict: simulation data
    """
    script = None

    # Patch for the mirror profile for the exported .py file from Sirepo:
    code = _patch_mirror_profile(code, lib_dir)

    try:
        with pkio.save_chdir(tmp_dir):
            # This string won't show up anywhere
            script = pkio.write_text('in.py', code)
            o = SRWParser(
                script,
                lib_dir=py.path.local(lib_dir),
                user_filename=user_filename,
                arguments=arguments,
            )
            return o.data
    except Exception as e:
        lineno = script and _find_line_in_trace(script)
        # Avoid
        pkdlog(
            'Error: {}; exception={}; script={}; filename={}; stack:\n{}',
            e.message,
            e,
            script,
            user_filename,
            pkdexc(),
        )
        e = str(e)[:50]
        raise ValueError(
            'Error on line {}: {}'.format(lineno, e)
            if lineno else 'Error: {}'.format(e))
def test_init_tree():
    """Normal case"""
    with pkunit.save_chdir_work():
        name = 'proj1'
        pkio.mkdir_parent(name)
        with pkio.save_chdir(name):
            subprocess.check_call(['git', 'init', '.'])
            subprocess.check_call(['git', 'config', 'user.email', '*****@*****.**'])
            subprocess.check_call(['git', 'config', 'user.name', 'pykern'])
            projex.init_tree(
                name=name,
                author='zauthor',
                author_email='*****@*****.**',
                description='some python project',
                license='MIT',
                url='http://example.com',
            )
            pkio.write_text('tests/test_1.py', 'def test_1(): pass')
            for expect_fn, expect_re in (
                ('.gitignore', 'MANIFEST.in'),
                ('LICENSE', 'The MIT License'),
                ('README.md', 'licenses/MIT'),
                ('docs/_static/.gitignore', ''),
                ('docs/_templates/.gitignore', ''),
                ('docs/index.rst', name),
                ('requirements.txt', 'pykern'),
                ('setup.py', "author='zauthor'"),
                ('setup.py', r':copyright:.*zauthor\.'),
                ('tests/.gitignore', '_work'),
                (name + '/__init__.py', ''),
                (name + '/package_data/.gitignore', ''),
                (
                    '{}/{}_console.py'.format(name, name),
                    r"main\('{}'\)".format(name),
                ),
            ):
                assert re.search(expect_re, pkio.read_text(expect_fn)), \
                    '{} should exist and match "{}"'.format(expect_fn, expect_re)
            subprocess.check_call(['git', 'commit', '-m', 'initial'])
            # Do not install from PyPI
            pkio.write_text(
                'requirements.txt',
                '-e ' + str(py.path.local(__file__).dirpath().dirpath().dirpath()),
            )
            subprocess.check_call(['python', 'setup.py', 'test'])
            subprocess.check_call(['python', 'setup.py', 'tox'])
def default_command(force=False):
    """Generate index.html files in mm-dd subdirectories

    Args:
        force (bool): force thumbs and indexes even if they exist
    """
    if _DIR_RE.search(os.getcwd()):
        _one_dir(force)
    else:
        dirs = list(glob.iglob(_MM_DD))
        if not dirs:
            dirs = list(glob.iglob(_YYYY_MM_DD))
            if not dirs:
                pkcli.command_error('no directories matching YYYY or MM-DD')
        for d in sorted(dirs):
            with pkio.save_chdir(d):
                _one_dir(force)