def test_magnetic_measurements_zip_file():
    from sirepo.template import srw
    m = srw.MagnMeasZip(pkresource.filename('template/srw/magnetic_measurements.zip', srw))
    assert m.index_dir == ''
    assert m.index_file == 'ivu21_srx_sum.txt'
    assert m.find_closest_gap('6.72') == 2.500648
    assert m.find_closest_gap('8.4') == 2.500648
    assert 'ivu21_srx_g6_2c.dat' in m.dat_files
    m = srw.MagnMeasZip(pkresource.filename('template/srw/magn_meas_fmx.zip', srw))
    assert m.index_dir == 'magn_meas'
    assert m.index_file == 'ivu21_fmx_sum.txt'
    assert m.find_closest_gap('6.7') == 2.500384
    assert m.find_closest_gap('8.4') == 2.501031
    assert 'ivu21_fmx_g7_7c.dat' in m.dat_files

def examples(app):
    files = pkio.walk_tree(
        pkresource.filename(_EXAMPLE_DIR_FORMAT.format(app)),
        re.escape(JSON_SUFFIX) + '$',
    )
    #TODO(robnagler) Need to update examples statically before build
    #   and assert on build
    return [open_json_file(app, path=str(f)) for f in files]

def init_tree(name, author, author_email, description, license, url):
    """Setup a project tree with: docs, tests, etc., and checkin to git.

    Creates: setup.py, index.rst, project dir, <name>_console.py, etc.

    Overwrites files if they exist without checking.

    Args:
        name (str): short name of the project, e.g. ``pykern``.
        author (str): copyright holder, e.g. ``RadiaSoft LLC``
        author_email (str): how to reach author, e.g. ``[email protected]``
        description (str): one-line summary of project
        license (str): url of license
        url (str): website for project, e.g. http://pykern.org
    """
    assert os.path.isdir('.git'), \
        'Must be run from the root directory of the repo'
    assert not os.path.isdir(name), \
        '{}: already exists, only works on fresh repos'.format(name)
    assert name == py.path.local().basename, \
        '{}: name must be the name of the current directory'.format(name)
    license = license.lower()
    base = pkresource.filename('projex')
    values = copy.deepcopy(DEFAULTS)
    values.update({
        'name': name,
        'author': author,
        'description': description,
        'author_email': author_email,
        'url': url,
        'license': _license(license, 0),
        'classifier_license': _license(license, 1),
    })
    values['copyright_license_rst'] = values['copyright_license_rst'].format(**values)
    suffix_re = r'\.jinja$'
    for src in pkio.walk_tree(base, file_re=suffix_re):
        dst = py.path.local(src).relto(str(base))
        dst = dst.replace('projex', name).replace('dot-', '.')
        dst = re.sub(suffix_re, '', dst)
        pkio.mkdir_parent_only(dst)
        _render(src, values, output=dst)
    src = py.path.local(pkresource.filename('projex-licenses'))
    src = src.join(license + '.jinja')
    _render(src, values, output='LICENSE')

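# Hypothetical usage sketch (not from the source): init_tree is called from the root of a
# freshly created git repository whose directory name matches ``name``. All argument values
# below are illustrative placeholders.
init_tree(
    name='myproject',
    author='Example Author',
    author_email='[email protected]',
    description='One-line summary of myproject',
    license='http://www.apache.org/licenses/LICENSE-2.0.html',
    url='http://example.org/myproject',
)
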
def __init__(self, controller, parent=None):
    super(View, self).__init__(parent)
    self._controller = controller
    self.global_params = {}
    self.setStyleSheet(pkio.read_text(pkresource.filename('srw_pane.css')))
    main = QtGui.QHBoxLayout()
    self._add_action_buttons(main)
    self._add_param_vbox(main)
    self._add_result_texts(main)
    self.setLayout(main)

def load_resource(basename):
    """Read a resource, making sure all keys and values are locale

    Args:
        basename (str): file to read without yml suffix

    Returns:
        object: `pkcollections.Dict` or list
    """
    return load_file(
        pkresource.filename(basename + '.yml', pkinspect.caller_module()))

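# Hypothetical usage sketch (file name is illustrative): load package_data/.../defaults.yml
# from the calling module's root package as a dict-like object.
cfg = load_resource('defaults')
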
def xtest_find_tab_undulator_length():
    from sirepo.template import srw
    magnet = pkresource.filename('static/dat/magnetic_measurements.zip', srw)
    for case in (
        (6.82, 'ivu21_srx_g6_8c.dat', 6.8),
        ('3', 'ivu21_srx_g6_2c.dat', 6.2),
        (45, 'ivu21_srx_g40_0c.dat', 40),
    ):
        res = srw.find_tab_undulator_length(zip_file=magnet, gap=case[0])
        assert res['dat_file'] == case[1]
        assert res['closest_gap'] == case[2]
        assert abs(res['found_length'] - 2.5) < 1e-3

def render_resource(basename, *args, **kwargs):
    """Render a pkresource as a jinja template.

    Args:
        basename (str): name without jinja extension
        args (list): see func:`render_file` for rest of args and return
    """
    return render_file(
        pkresource.filename(basename + '.jinja', pkinspect.caller_module()),
        *args,
        **kwargs
    )

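# Hypothetical usage sketch (names are illustrative): render package_data/.../greeting.jinja
# from the calling module's root package, passing the template values through to render_file.
out = render_resource('greeting', {'name': 'World'})
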
def test_filename():
    d = pkunit.data_dir()
    t1 = importlib.import_module(d.basename + '.t1')
    assert t1.somefile().startswith('anything'), \
        'When somefile is called, it should return the "anything" file'
    n = pkresource.filename('test.yml', pkresource)
    sn = [n]

    def _tail():
        (sn[0], tail) = os.path.split(sn[0])
        return tail

    assert 'test.yml' == _tail(), \
        'nth of resource name is name passed to pkresource'
    assert 'package_data' == _tail(), \
        'n-1th resource is always "package_data"'
    assert 'pykern' == _tail(), \
        'n-2th resource is root package of passed in context'
    with pytest.raises(IOError):
        # Should not find somefile, because that's in a different context
        pkresource.filename('somefile', pkresource)
    assert pkresource.filename('somefile', t1.somefile), \
        'Given any object, should find resource in root package of that object'

def test_validate_safe_zip():
    from sirepo.template.template_common import validate_safe_zip
    from sirepo.template import srw
    from sirepo.template.srw import validate_magnet_data_file

    zip_dir = str(pkunit.data_dir() + '/zip_dir')

    # Reject a zip with no index file
    with pkunit.pkexcept(AssertionError):
        validate_safe_zip(zip_dir + '/bad_zip_no_index.zip', zip_dir, validate_magnet_data_file)

    # Reject a zip with an incomplete index file
    with pkunit.pkexcept(AssertionError):
        validate_safe_zip(zip_dir + '/bad_zip_incomplete_index.zip', zip_dir, validate_magnet_data_file)

    # Reject a zip with entries in index file that are not in zip
    with pkunit.pkexcept(AssertionError):
        validate_safe_zip(zip_dir + '/bad_zip_extra_index.zip', zip_dir, validate_magnet_data_file)

    # Reject a zip with unacceptable file types
    with pkunit.pkexcept(AssertionError):
        validate_safe_zip(zip_dir + '/bad_zip_bad_types.zip', zip_dir, validate_magnet_data_file)

    # Finally, make sure the included measurement files are OK
    # We're not really extracting them so just send the test directory as target
    validate_safe_zip(pkresource.filename('template/srw/magn_meas_chx.zip', srw), zip_dir, validate_magnet_data_file)
    validate_safe_zip(pkresource.filename('template/srw/magn_meas_esm.zip', srw), zip_dir, validate_magnet_data_file)
    validate_safe_zip(pkresource.filename('template/srw/magn_meas_fmx.zip', srw), zip_dir, validate_magnet_data_file)
    validate_safe_zip(pkresource.filename('template/srw/magn_meas_srx.zip', srw), zip_dir, validate_magnet_data_file)
    validate_safe_zip(pkresource.filename('template/srw/magn_meas_u20_hxn.zip', srw), zip_dir, validate_magnet_data_file)
    validate_safe_zip(pkresource.filename('template/srw/magnetic_measurements.zip', srw), zip_dir, validate_magnet_data_file)

def _validate_schema(schema):
    """Validate the schema

    Validations performed:
        Values of default data (if any)
        Existence of dynamic modules
        Enums keyed by string value

    Args:
        schema (pkcollections.Dict): app schema
    """
    sch_models = schema.model
    sch_enums = schema.enum
    sch_ntfy = schema.notifications
    sch_cookies = schema.cookies
    for name in sch_enums:
        for values in sch_enums[name]:
            if not isinstance(values[0], pkconfig.STRING_TYPES):
                raise AssertionError(util.err(name, 'enum values must be keyed by a string value: {}', type(values[0])))
    for model_name in sch_models:
        sch_model = sch_models[model_name]
        for field_name in sch_model:
            sch_field_info = sch_model[field_name]
            if len(sch_field_info) <= 2:
                continue
            field_default = sch_field_info[2]
            if field_default == '' or field_default is None:
                continue
            _validate_enum(field_default, sch_field_info, sch_enums)
            _validate_number(field_default, sch_field_info)
    for n in sch_ntfy:
        if 'cookie' not in sch_ntfy[n] or sch_ntfy[n].cookie not in sch_cookies:
            raise AssertionError(util.err(sch_ntfy[n], 'notification must reference a cookie in the schema'))
    for sc in sch_cookies:
        _validate_cookie_def(sch_cookies[sc])
    for type in schema.dynamicModules:
        for src in schema.dynamicModules[type]:
            pkresource.filename(src[1:])

def test_importer():
    from sirepo.template.srw_importer import import_python
    from pykern import pkio
    from pykern import pkresource
    from pykern import pkunit
    from pykern.pkdebug import pkdc, pkdp
    import glob
    import py

    _TESTS = {  # Values are optional arguments:
        'amx': ('amx', None),
        'amx_bl2': ('amx', '--op_BL=2'),
        'amx_bl3': ('amx', '--op_BL=3'),
        'amx_bl4': ('amx', '--op_BL=4'),
        'chx': ('chx', None),
        'chx_fiber': ('chx_fiber', None),
        'exported_chx': ('exported_chx', None),
        'exported_gaussian_beam': ('exported_gaussian_beam', None),
        'exported_undulator_radiation': ('exported_undulator_radiation', None),
        'lcls_simplified': ('lcls_simplified', None),
        'lcls_sxr': ('lcls_sxr', None),
        'sample_from_image': ('sample_from_image', None),
        'smi_es1_bump_norm': ('smi', '--beamline ES1 --bump --BMmode Norm'),
        'smi_es1_nobump': ('smi', '--beamline ES1'),
        'smi_es2_bump_lowdiv': ('smi', '--beamline ES2 --bump --BMmode LowDiv'),
        'smi_es2_bump_norm': ('smi', '--beamline ES2 --bump --BMmode Norm'),
        'srx': ('srx', None),
        'srx_bl2': ('srx', '--op_BL=2'),
        'srx_bl3': ('srx', '--op_BL=3'),
        'srx_bl4': ('srx', '--op_BL=4'),
    }

    dat_dir = py.path.local(pkresource.filename('template/srw/', import_python))
    with pkunit.save_chdir_work():
        work_dir = py.path.local('.')
        for f in glob.glob(str(dat_dir.join('mirror_*d.dat'))):
            py.path.local(f).copy(work_dir)
        py.path.local(str(dat_dir.join('sample.tif'))).copy(work_dir)
        for b in sorted(_TESTS.keys()):
            base_py = '{}.py'.format(_TESTS[b][0])
            code = pkio.read_text(pkunit.data_dir().join(base_py))
            actual = import_python(
                code,
                tmp_dir=str(work_dir),
                lib_dir=str(work_dir),
                user_filename=r'c:\anything\{}.anysuffix'.format(_TESTS[b][0]),
                arguments=_TESTS[b][1],
            )
            actual['version'] = 'IGNORE-VALUE'
            pkunit.assert_object_with_json(b, actual)

def _run_elegant(bunch_report=False, with_mpi=False):
    exec(pkio.read_text(template_common.PARAMETERS_PYTHON_FILE), locals(), locals())
    if bunch_report and re.search(r'\&sdds_beam\s', elegant_file):
        return
    pkio.write_text('elegant.lte', lattice_file)
    ele = 'elegant.ele'
    pkio.write_text(ele, elegant_file)
    # TODO(robnagler) Need to handle this specially, b/c different binary
    env = copy.deepcopy(os.environ)
    env['RPN_DEFNS'] = pkresource.filename('defns.rpn')
    if with_mpi and mpi.cfg.cores > 1:
        return mpi.run_program(['Pelegant', ele], output=ELEGANT_LOG_FILE, env=env)
    pksubprocess.check_call_with_signals(
        ['elegant', ele],
        output=ELEGANT_LOG_FILE,
        env=env,
        msg=pkdp,
    )

def _sphinx_apidoc(base):
    """Call `sphinx-apidoc` with appropriately configured ``conf.py``.

    Args:
        base (dict): values to be passed to ``conf.py.in`` template
    """
    # Deferred import so initial setup.py works
    values = copy.deepcopy(base)
    values['year'] = datetime.datetime.now().year
    values['empty_braces'] = '{}'
    from pykern import pkresource
    data = _read(pkresource.filename('docs-conf.py.format'))
    _write('docs/conf.py', data.format(**values))
    subprocess.check_call(
        [
            'sphinx-apidoc',
            '-f',
            '-o',
            'docs',
        ] + base['packages'],
    )
    return base

def test_importer():
    from sirepo.importer import import_python
    dat_dir = py.path.local(pkresource.filename('static/dat/', import_python))
    with pkunit.save_chdir_work():
        work_dir = py.path.local('.')
        for f in glob.glob(str(dat_dir.join('mirror_*d.dat'))):
            py.path.local(f).copy(work_dir)
        for b in sorted(_TESTS.keys()):
            base_py = '{}.py'.format(_TESTS[b][0])
            code = pkio.read_text(pkunit.data_dir().join(base_py))
            error, actual = import_python(
                code,
                tmp_dir=str(work_dir),
                lib_dir=str(work_dir),
                user_filename=r'c:\anything\{}.anysuffix'.format(_TESTS[b][0]),
                arguments=_TESTS[b][1],
            )
            assert not error, \
                '{}: should import without an error: {}'.format(base_py, error)
            actual['version'] = 'IGNORE-VALUE'
            assert not error, \
                '{}: should be valid input'.format(base_py)
            pkunit.assert_object_with_json(b, actual)

import py.path
import re
import shutil
from pykern import pkio
from pykern import pkjinja
from pykern import pkresource
from sirepo.template import template_common
import uti_plot_com

WANT_BROWSER_FRAME_CACHE = False

_MULTI_ELECTRON_FILENAME = 'res_int_pr_me.dat'

#: Where server files and static files are found
_STATIC_FOLDER = py.path.local(pkresource.filename('static'))

with open(str(_STATIC_FOLDER.join('json/beams.json'))) as f:
    _PREDEFINED_BEAMS = json.load(f)

with open(str(_STATIC_FOLDER.join('json/mirrors.json'))) as f:
    _PREDEFINED_MIRRORS = json.load(f)

with open(str(_STATIC_FOLDER.join('json/srw-schema.json'))) as f:
    _SCHEMA = json.load(f)


def background_percent_complete(data, run_dir, is_running):
    filename = str(run_dir.join(_MULTI_ELECTRON_FILENAME))
    if os.path.isfile(filename):
        return {

def somefile():
    with open(pkresource.filename('somefile')) as f:
        return f.read()

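# Hedged illustration grounded in the test_filename assertions above (the exact prefix
# depends on where the package is installed): pkresource.filename() resolves a basename
# under the root package's package_data directory of the given context, so somefile()
# returns the contents of '<root_package>/package_data/somefile'.
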
def __init__(self, mgr=None):
    self.model_data = {}
    self.mgr = radia_tk.RadiaGeomMgr() if mgr is None else mgr
    self.on_displayed(self._radia_displayed)
    self.vtk_viewer = vtk_viewer.Viewer()
    #TODO(mvk): build view from this schema
    self.schema = pkcollections.json_load_any(
        pkio.py_path(pkresource.filename('schema.json')),
    )
    self.view_type_list = ipywidgets.Dropdown(
        layout={'width': 'max-content'},
        options=VIEW_TYPES,
    )
    self.view_type_list.observe(self._update_viewer, names='value')
    view_type_list_grp = _label_grp(self.view_type_list, 'View')
    self.field_type_list = ipywidgets.Dropdown(
        layout={'width': 'max-content'},
        options=radia_tk.FIELD_TYPES,
    )
    self.field_type_list.observe(self._update_viewer, names='value')
    field_type_list_grp = _label_grp(self.field_type_list, 'Field')
    self.path_type_list = ipywidgets.Dropdown(
        layout={'width': 'max-content'},
        options=PATH_TYPES,
    )
    self.path_type_list.observe(self._update_viewer, names='value')
    self.path_type_list_grp = _label_grp(self.path_type_list, 'Path')

    # behavior changes depending on path type chosen
    self.new_field_point_btn = ipywidgets.Button(
        description='+',
        layout={'width': 'fit-content'},
    )
    self.new_field_point_btn.on_click(self._add_field_point)

    self.line_begin_pt_flds, line_begin_point_coords_grp = _coord_grp(
        [-10, 0, 0], {'width': '64px'})
    line_begin_grp = _label_grp(line_begin_point_coords_grp, 'Begin')
    self.line_end_pt_flds, line_end_point_coords_grp = _coord_grp(
        [10, 0, 0], {'width': '64px'})
    line_end_grp = _label_grp(line_end_point_coords_grp, 'End')
    self.path_num_pts = ipywidgets.IntText(
        value=10, min=2, max=100, step=1, layout={'width': '48px'})
    num_pts_grp = _label_grp(self.path_num_pts, 'Num Points')
    self.line_grp = ipywidgets.HBox([
        line_begin_grp, line_end_grp, num_pts_grp, self.new_field_point_btn
    ], layout={'padding': '0 6px 0 0'})

    self.circle_ctr_flds, circle_ctr_coords_grp = _coord_grp(
        [0, 0, 0], {'width': '64px'})
    circle_ctr_grp = _label_grp(circle_ctr_coords_grp, 'Center')
    self.circle_radius = ipywidgets.BoundedFloatText(
        min=0.1, max=1000, value=10.0, layout={'width': '48px'})
    circle_radius_grp = _label_grp(self.circle_radius, 'Radius')
    self.circle_theta = ipywidgets.BoundedFloatText(
        min=-math.pi, max=math.pi, step=0.1, value=0.0, layout={'width': '48px'})
    circle_theta_grp = _label_grp(self.circle_theta, '𝞱')
    self.circle_phi = ipywidgets.BoundedFloatText(
        min=-math.pi, max=math.pi, step=0.1, value=0.0, layout={'width': '48px'})
    circle_phi_grp = _label_grp(self.circle_phi, '𝞿')
    self.circle_grp = ipywidgets.HBox([
        circle_ctr_grp, circle_radius_grp, circle_theta_grp, circle_phi_grp,
        num_pts_grp, self.new_field_point_btn
    ], layout={'padding': '0 6px 0 0'})

    #self.pt_file_btn = ipywidgets.FileUpload()
    self.pt_file_btn = ipywidgets.Button(
        description='Choose', layout={'width': 'fit-content'})
    self.pt_file_btn.on_click(self._upload)
    self.pt_file_label = ipywidgets.Label('<None>')
    self.pt_file_label.add_class('rs-file-input-label')
    self.observe(self._data_loaded, names='file_data')
    self.pt_file_grp = ipywidgets.HBox(
        [self.pt_file_btn, self.pt_file_label, self.new_field_point_btn],
        layout={'padding': '0 6px 0 0'})

    self.geom_list = ipywidgets.Dropdown(
        layout={'width': 'max-content'},
        options=[n for n in self.mgr.get_geoms()])
    self.geom_list.observe(self._set_current_geom, names='value')
    geom_list_grp = _label_grp(self.geom_list, 'Geometry')

    self.field_color_map_list = ipywidgets.Dropdown(
        layout={'width': 'max-content'},
    )
    # the options/value of a dropdown are not syncable!  We'll work around it
    self.field_color_map_list.observe(self._set_field_color_map, names='value')
    field_map_grp = _label_grp(self.field_color_map_list, 'Color Map')
    field_map_grp.layout = ipywidgets.Layout(padding='0 6px 0 0')
    self.vector_scaling_list = ipywidgets.Dropdown(
        layout={'width': 'max-content'},
    )
    self.vector_scaling_list.observe(self._set_vector_scaling, names='value')
    vector_scaling_grp = _label_grp(self.vector_scaling_list, 'Scaling')

    self.new_field_pt_flds, new_field_point_coords_grp = _coord_grp([0, 0, 0])
    self.new_field_point_grp = ipywidgets.HBox(
        [new_field_point_coords_grp, self.new_field_point_btn],
        layout={'padding': '0 6px 0 0'})
    self.new_field_point_btn_actions = [
        self._add_field_point, self._add_field_line, self._add_field_circle,
        self._add_field_file
    ]

    self.vector_props_grp = ipywidgets.HBox(
        [field_map_grp, vector_scaling_grp])
    self.vector_grp = ipywidgets.HBox([
        field_type_list_grp,
        self.path_type_list_grp,
        self.line_grp,
        self.circle_grp,
        self.new_field_point_grp,
        self.pt_file_grp,
    ])

    geom_grp = ipywidgets.HBox(
        [geom_list_grp, view_type_list_grp, self.vector_props_grp],
        layout={'padding': '3px 0px 3px 0px'})

    self.solve_prec = ipywidgets.BoundedFloatText(
        value=0.0001, min=1e-06, max=10.0, step=1e-06,
        layout={'width': '72px'},
    )
    solve_prec_grp = _label_grp(
        self.solve_prec,
        'Precision (' + radia_tk.FIELD_UNITS[radia_tk.FIELD_TYPE_MAG_B] + ')')
    self.solve_max_iter = ipywidgets.BoundedIntText(
        value=1500, min=1, max=1e6, step=100,
        layout={'width': '72px'},
    )
    solve_max_iter_grp = _label_grp(self.solve_max_iter, 'Max Iterations')
    self.solve_method = ipywidgets.Dropdown(
        layout={'width': 'max-content'},
        value=0,
        options=[('0', 0), ('3', 3), ('4', 4), ('5', 5)])
    solve_method_grp = _label_grp(self.solve_method, 'Method', layout={})
    self.solve_btn = ipywidgets.Button(
        description='Solve',
        layout={'width': 'fit-content'},
    )
    self.solve_btn.on_click(self._solve)

    spnr = pkio.read_binary(pkresource.filename('sirepo_animated.gif'))
    self.solve_spinner = ipywidgets.Image(value=spnr, format='gif', width=24, height=24)
    self.solve_spinner.layout.display = 'none'
    self.solve_res_label = ipywidgets.Label()

    #self.export_btn = ipywidgets.Button(
    #    description='Export',
    #    layout={'width': 'fit-content'},
    #)
    #self.export_btn.on_click(self._export)
    #self.export_link = ipywidgets.HTML(
    #    value='<a href="#" download="xxx">Export</a>'
    #)
    #self.export_link.add_class('radia-file-output')

    self.reset_btn = ipywidgets.Button(
        description='Reset',
        layout={'width': 'fit-content'},
    )
    self.reset_btn.on_click(self._reset)

    solve_grp = ipywidgets.HBox(
        [
            solve_prec_grp,
            solve_max_iter_grp,
            solve_method_grp,
            self.solve_btn,
            self.solve_spinner,
            self.solve_res_label,
            #self.export_btn,
            #self.export_link
        ],
        layout={'padding': '3px 0px 3px 0px'})

    # for enabling/disabling as a whole
    self.controls = [
        self.field_color_map_list,
        self.new_field_point_btn,
        self.field_type_list,
        self.path_type_list,
        self.solve_btn,
        self.solve_method,
        self.solve_max_iter,
        self.solve_prec,
        self.vector_scaling_list,
        self.view_type_list,
    ]

    controls_grp = ipywidgets.VBox(
        [geom_grp, solve_grp],
        layout={'padding': '8px 4px 4px 4px'})

    self.observe(self._set_client_props, names='client_props')
    super(RadiaViewer, self).__init__(children=[
        self.vtk_viewer, geom_grp, self.vector_grp, solve_grp, self.out
    ])

import sirepo.template
import threading
import time
import werkzeug.exceptions

#: Json files
JSON_SUFFIX = '.json'

#: Schema common values, e.g. version
SCHEMA_COMMON = None

#: Simulation file name is globally unique to avoid collisions with simulation output
SIMULATION_DATA_FILE = 'sirepo-data' + JSON_SUFFIX

#: The root of the pkresource tree (package_data)
RESOURCE_FOLDER = pkio.py_path(pkresource.filename(''))

#: Where server files and static files are found
STATIC_FOLDER = RESOURCE_FOLDER.join('static')

#: Verify ID
_IS_PARALLEL_RE = re.compile('animation', re.IGNORECASE)

#: How to find examples in resources
_EXAMPLE_DIR = 'examples'

#: Valid characters in ID
_ID_CHARS = numconv.BASE62

#: length of ID
_ID_LEN = 8

        out_dict[data_list[i][0]] = data_list[i][2]
    return out_dict


class Struct:
    def __init__(self, **entries):
        self.__dict__.update(entries)


# For sourceIntensityReport:
try:
    import py.path
    from pykern import pkresource
    static_dir = py.path.local(pkresource.filename('static'))
except:
    static_dir = '/home/vagrant/src/radiasoft/sirepo/sirepo/package_data/static'

static_js_dir = static_dir + '/js'
static_json_dir = static_dir + '/json'


def get_default_drift():
    """Parse the srw.js file to find the default values for drift propagation parameters,
    which are sometimes missing in the exported .py files (when distance = 0) but must be
    present in the .json files.

    :return default_drift_prop: found list as a string.
    """
    try:

import math
import numpy as np
from pykern import pkresource

#General constants
_ElMass_kg = 9.10938e-31
_Elch = 1.60217662e-19
_LightSp = 299792458
_ElMass_MeV = 0.5109989
_Planck_eVs = 4.135667662e-15

#Load numerical arrays for universal functions
#fluxcorrectionarray = np.loadtxt("resource/gwSrwBrilUndHarmUnivFlux.txt")
#divcorrectionarray = np.loadtxt("resource/gwSrwBrilUndHarmUnivDiv.txt")
#sizecorrectionarray = np.loadtxt("resource/gwSrwBrilUndHarmUnivSize.txt")
fluxcorrectionarray = np.loadtxt(pkresource.filename("template/srw/brilliance/gwSrwBrilUndHarmUnivFlux.txt"))
divcorrectionarray = np.loadtxt(pkresource.filename("template/srw/brilliance/gwSrwBrilUndHarmUnivDiv.txt"))
sizecorrectionarray = np.loadtxt(pkresource.filename("template/srw/brilliance/gwSrwBrilUndHarmUnivSize.txt"))
#srwlib.srwl_uti_read_data_cols
#np.array(srwlib.srwl_uti_read_data_cols('gwSrwBrilUndHarmUnivFlux.txt', '\t'))
#srwl_uti_interp_2d(_x, _y, _x_min, _x_step, _nx, _y_min, _y_step, _ny, _ar_f, _ord=3, _ix_per=1, _ix_ofst=0)


#Undulator K and E functions
def getK(By, lam_u):
    """Return K value

    :param By: vertical magnetic field [T]
    :param lam_u: undulator period [m]
    """
    return (_Elch/2/math.pi/_ElMass_kg/_LightSp)*By*lam_u


def getE(nHarm, Ebeam, K, lam_u):

def __init__(self, defaults, params, file_prefix, parent=None):
    super(WidgetView, self).__init__(parent)
    #self.setWindowTitle(rt_qt.i18n_text(defaults.decl.label))
    self.setStyleSheet(pkio.read_text(pkresource.filename(file_prefix + '_popup.css')))
    self._form = Form(defaults, params, self, with_button=False)

DEFAULT_INTENSITY_DISTANCE = 20

#: Input json file
INPUT_BASE_NAME = 'in'

#: Output json file
OUTPUT_BASE_NAME = 'out'

#: Python file (not all simulations)
PARAMETERS_PYTHON_FILE = 'parameters.py'

#: stderr and stdout
RUN_LOG = 'run.log'

RESOURCE_DIR = py.path.local(pkresource.filename('template'))

LIB_FILE_PARAM_RE = re.compile(r'.*File$')

_WATCHPOINT_REPORT_NAME = 'watchpointReport'


def flatten_data(d, res, prefix=''):
    """Takes a nested dictionary and converts it to a single level dictionary with flattened keys."""
    for k in d:
        v = d[k]
        if isinstance(v, dict):
            flatten_data(v, res, prefix + k + '_')
        elif isinstance(v, list):
            pass
        else:

from __future__ import absolute_import, division, print_function
from pykern import pkcollections
from pykern import pkio
from pykern import pkjson
from pykern import pkresource
from pykern import pkrunpy
from pykern.pkdebug import pkdlog, pkdexc, pkdp
import ast
import inspect
import os
import py.path
import re
import srwl_bl
import sirepo.sim_data

_JS_DIR = py.path.local(pkresource.filename('static/js'))

_SIM_DATA, SIM_TYPE, _SCHEMA = sirepo.sim_data.template_globals('srw')


class SRWParser(object):
    def __init__(self, script, user_filename, arguments, optics_func_name='set_optics'):
        m = pkrunpy.run_path_as_module(script)
        if arguments:
            import shlex
            arguments = shlex.split(arguments)
        self.var_param = srwl_bl.srwl_uti_parse_options(m.varParam, use_sys_argv=False, args=arguments)
        self.replace_mirror_files()
        self.replace_image_files()
        try:
            self.optics = getattr(m, optics_func_name)(self.var_param)
        except ValueError as e:

from pykern import pkcollections
from pykern.pkdebug import pkdlog, pkdexc
from pykern import pkresource
from pykern import pkrunpy
from srwl_bl import srwl_uti_parse_options, srwl_uti_std_options
import ast
import inspect
import py
import py.path
import re
import traceback

try:
    import cPickle as pickle
except Exception:
    import pickle

js_dir = py.path.local(pkresource.filename('static/js'))


class SRWParser(object):
    def __init__(self, script, lib_dir, user_filename, arguments, optics_func_name='set_optics'):
        self.lib_dir = lib_dir
        self.initial_lib_dir = lib_dir
        self.list_of_files = None
        self.optics_func_name = optics_func_name
        m = pkrunpy.run_path_as_module(script)
        varParam = getattr(m, 'varParam')

def examples(app):
    files = pkio.walk_tree(
        pkresource.filename(_EXAMPLE_DIR_FORMAT.format(app)),
        re.escape(JSON_SUFFIX) + '$',
    )
    return [open_json_file(app, str(f)) for f in files]

import zipfile
from pykern import pkio
from pykern import pkjinja
from pykern import pkresource
from sirepo.template import template_common
import uti_plot_com

WANT_BROWSER_FRAME_CACHE = False

_MULTI_ELECTRON_FILENAME = 'res_int_pr_me.dat'

_PREDEFINED_MAGNETIC_ZIP_FILE = 'magnetic_measurements.zip'

#: Where server files and static files are found
_STATIC_FOLDER = py.path.local(pkresource.filename('static'))

with open(str(_STATIC_FOLDER.join('json/beams.json'))) as f:
    _PREDEFINED_BEAMS = json.load(f)

with open(str(_STATIC_FOLDER.join('json/mirrors.json'))) as f:
    _PREDEFINED_MIRRORS = json.load(f)

with open(str(_STATIC_FOLDER.join('json/srw-schema.json'))) as f:
    _SCHEMA = json.load(f)


def background_percent_complete(data, run_dir, is_running):
    filename = str(run_dir.join(_MULTI_ELECTRON_FILENAME))
    if os.path.isfile(filename):
        return {

def xtest_find_height_profile_dimension():
    from sirepo.template import srw
    for dimension in (1, 2):
        dat_file = pkresource.filename('static/dat/mirror_{}d.dat'.format(dimension), srw)
        found_dimension = srw.find_height_profile_dimension(dat_file)
        assert found_dimension == dimension

import json
import math
import ntpath
import os
import py.path
import re
import subprocess

from pykern import pkresource
from pykern.pkdebug import pkdc, pkdlog, pkdp
from sirepo import simulation_db
from sirepo.template import elegant_lattice_parser

_IGNORE_FIELD = ['rootname', 'search_path', 'semaphore_file']

_RPN_DEFN_FILE = str(py.path.local(pkresource.filename('defns.rpn')))

_ANGLE_FIELDS = ['angle', 'kick', 'hkick']
_BEND_TYPES = ['BUMPER', 'CSBEND', 'CSRCSBEND', 'FMULT', 'HKICK', 'KICKER', 'KPOLY', 'KSBEND', 'KQUSE', 'MBUMPER', 'MULT', 'NIBEND', 'NISEPT', 'RBEN', 'SBEN', 'TUBEND']
_DRIFT_TYPES = ['CSRDRIFT', 'DRIF', 'EDRIFT', 'EMATRIX', 'LSCDRIFT']
_IGNORE_LENGTH_TYPES = ['ILMATRIX', 'STRAY', 'SCRIPT']
_LENGTH_FIELDS = ['l', 'xmax', 'length']

_STATIC_FOLDER = py.path.local(pkresource.filename('static'))

with open(str(_STATIC_FOLDER.join('json/elegant-default.json'))) as f:
    _DEFAULTS = json.load(f)

_SCHEMA = simulation_db.get_schema('elegant')


def _init_types():