def _compute_column_info(dataFile):
    path = str(
        simulation_db.simulation_lib_dir(SIM_TYPE).join(
            _filename(dataFile.file)))
    if re.search(r'\.npy$', path):
        return _compute_numpy_info(path)
    return _compute_csv_info(path)

def _compute_csv_info(filename):
    res = PKDict(
        hasHeaderRow=True,
        rowCount=0,
    )
    row = None
    with open(
        simulation_db.simulation_lib_dir(
            SIM_TYPE,
        ).join(_filename(filename)),
    ) as f:
        for r in csv.reader(f):
            if not row:
                row = r
            res.rowCount += 1
    if not row:
        return PKDict(
            error='Invalid CSV file: no columns detected'
        )
    # csv file may or may not have column names
    # if any value in the first row is numeric, assume no headers
    if list(filter(lambda x: template_common.NUMERIC_RE.search(x), row)):
        row = ['column {}'.format(i + 1) for i in range(len(row))]
        res.hasHeaderRow = False
    res.colsWithNonUniqueValues = _cols_with_non_unique_values(
        filename,
        res.hasHeaderRow,
        row,
    )
    res.header = row
    res.inputOutput = ['none' for i in range(len(row))]
    return res

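# A standalone sketch of the header-detection heuristic above. The exact
# pattern of template_common.NUMERIC_RE is not shown in this snippet, so
# NUMERIC_RE below is an assumed approximation:
import re

NUMERIC_RE = re.compile(r'^\s*[+-]?(\d+(\.\d*)?|\.\d+)([eE][+-]?\d+)?\s*$')

def _first_row_is_header(row):
    # if any cell in the first row looks numeric, treat the row as data
    return not any(NUMERIC_RE.search(x) for x in row)

assert _first_row_is_header(['time', 'voltage'])
assert not _first_row_is_header(['0.5', 'voltage'])
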
def _sdds_beam_type_from_file(filename):
    res = ''
    path = str(simulation_db.simulation_lib_dir(SIM_TYPE).join(filename))
    if sdds.sddsdata.InitializeInput(_SDDS_INDEX, path) == 1:
        res = _sdds_beam_type(sdds.sddsdata.GetColumnNames(_SDDS_INDEX))
    sdds.sddsdata.Terminate(_SDDS_INDEX)
    return res

def app_file_list(simulation_type, simulation_id, file_type):
    file_type = werkzeug.secure_filename(file_type)
    res = []
    exclude = None
    #TODO(pjm): use file prefixes for srw, currently assumes mirror is *.dat and others are *.zip
    if simulation_type == 'srw':
        if file_type == 'mirror':
            search = ['*.dat', '*.txt']
        elif file_type == 'sample':
            search = ['*.tif', '*.tiff', '*.TIF', '*.TIFF', '*.npy', '*.NPY']
            exclude = '_processed.tif'
        else:
            search = ['*.zip']
    else:
        search = ['{}.*'.format(file_type)]
    d = simulation_db.simulation_lib_dir(simulation_type)
    for extension in search:
        for f in glob.glob(str(d.join(extension))):
            if exclude and re.search(exclude, f):
                continue
            if os.path.isfile(f):
                filename = os.path.basename(f)
                if not simulation_type == 'srw':
                    # strip the file_type prefix
                    filename = filename[len(file_type) + 1:]
                res.append(filename)
    res.sort()
    return _json_response(res)

def app_import_file(simulation_type):
    template = sirepo.template.import_module(simulation_type)
    error, data = template.import_file(
        flask.request,
        simulation_db.simulation_lib_dir(simulation_type),
        simulation_db.tmp_dir(),
    )
    if error:
        return _json_response({'error': error})
    data['models']['simulation']['folder'] = flask.request.form['folder']
    return _save_new_and_reply(simulation_type, data)

def api_listFiles(simulation_type, simulation_id, file_type):
    # simulation_id is an unused argument
    file_type = werkzeug.secure_filename(file_type)
    res = []
    exclude = None
    #TODO(pjm): use file prefixes for srw, currently assumes mirror is *.dat and others are *.zip
    if simulation_type == 'srw':
        template = sirepo.template.import_module(simulation_type)
        search = template.extensions_for_file_type(file_type)
        if file_type == 'sample':
            exclude = '_processed.tif'
    else:
        search = ['{}.*'.format(file_type)]
    d = simulation_db.simulation_lib_dir(simulation_type)
    for extension in search:
        for f in glob.glob(str(d.join(extension))):
            if exclude and re.search(exclude, f):
                continue
            if os.path.isfile(f):
                filename = os.path.basename(f)
                if not simulation_type == 'srw':
                    # strip the file_type prefix
                    filename = filename[len(file_type) + 1:]
                res.append(filename)
    res.sort()
    return http_reply.gen_json(res)

def app_upload_file(simulation_type, simulation_id, file_type):
    f = flask.request.files['file']
    lib = simulation_db.simulation_lib_dir(simulation_type)
    template = sirepo.template.import_module(simulation_type)
    filename = werkzeug.secure_filename(f.filename)
    if simulation_type == 'srw':
        p = lib.join(filename)
    else:
        p = lib.join(werkzeug.secure_filename('{}.{}'.format(file_type, filename)))
    err = None
    if p.check():
        err = 'file exists: {}'.format(filename)
    if not err:
        f.save(str(p))
        err = template.validate_file(file_type, str(p))
        if err:
            pkio.unchecked_remove(p)
    if err:
        return _json_response({
            'error': err,
            'filename': filename,
            'fileType': file_type,
            'simulationId': simulation_id,
        })
    return _json_response({
        'filename': filename,
        'fileType': file_type,
        'simulationId': simulation_id,
    })

def _create_file():
    from sklearn.preprocessing import LabelEncoder

    # POSIT: Matches logic in package_data.template.ml.scale.py.jinja.read_data_and_encode_output_column()
    data = simulation_db.read_json(
        frame_args.run_dir.join(template_common.INPUT_BASE_NAME),
    )
    v = np.genfromtxt(
        str(simulation_db.simulation_lib_dir(SIM_TYPE).join(
            _filename(data.models.dataFile.file),
        )),
        delimiter=',',
        skip_header=data.models.columnInfo.hasHeaderRow,
        dtype=None,
        encoding='utf-8',
    )
    o = data.models.columnInfo.inputOutput.index('output')
    c = v[f'f{o}']
    e = LabelEncoder().fit(c)
    res = PKDict(
        zip(
            e.transform(e.classes_).astype(np.float).tolist(),
            e.classes_,
        ),
    )
    pkjson.dump_pretty(
        res,
        filename=_OUTPUT_FILE.classificationOutputColEncodingFile,
    )
    return res

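# Standalone illustration of the encoding map built above: LabelEncoder
# sorts the classes, and transform(classes_) returns their integer codes,
# so the zip produces a {code: label} dict. Requires scikit-learn; the
# labels are made up for the example.
from sklearn.preprocessing import LabelEncoder

e = LabelEncoder().fit(['virginica', 'setosa', 'setosa', 'versicolor'])
mapping = dict(zip(e.transform(e.classes_).astype(float).tolist(), e.classes_))
assert mapping == {0.0: 'setosa', 1.0: 'versicolor', 2.0: 'virginica'}
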
def app_file_list(simulation_type, simulation_id):
    res = []
    d = simulation_db.simulation_lib_dir(simulation_type)
    for f in glob.glob(str(d.join('*.*'))):
        if os.path.isfile(f):
            res.append(os.path.basename(f))
    res.sort()
    return json.dumps(res)

def _lib_file_datetime(filename):
    lib_filename = template_common.lib_file_name('analysisData', 'file', filename)
    path = simulation_db.simulation_lib_dir(SIM_TYPE).join(lib_filename)
    if path.exists():
        return {filename: path.mtime()}
    pkdlog('error, missing lib file: {}', path)
    return 0

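# template_common.lib_file_name is expected to produce a
# '<model>-<field>.<filename>' basename; a hypothetical equivalent,
# for illustration only:
def _example_lib_file_name(model_name, field, filename):
    return '{}-{}.{}'.format(model_name, field, filename)

assert _example_lib_file_name('analysisData', 'file', 'data.csv') == 'analysisData-file.data.csv'
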
def _compute_file_column_count(files):
    return _compute_column_count(
        simulation_db.simulation_lib_dir(SIM_TYPE),
        _SIM_DATA.lib_file_name_with_model_field('files', 'inputs', files.inputs),
        _SIM_DATA.lib_file_name_with_model_field('files', 'outputs', files.outputs),
        files,
    )

def api_downloadFile(simulation_type, simulation_id, filename):
    lib = simulation_db.simulation_lib_dir(simulation_type)
    filename = werkzeug.secure_filename(filename)
    p = lib.join(filename)
    if simulation_type == 'srw':
        attachment_name = filename
    else:
        # strip file_type prefix from attachment filename
        attachment_name = re.sub(r'^.*?-.*?\.', '', filename)
    return flask.send_file(str(p), as_attachment=True, attachment_filename=attachment_name)

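# The attachment-name regex above removes everything through the first '-'
# and the following '.', i.e. a '<model>-<field>.' lib-file prefix. A quick
# check with a hypothetical filename:
import re

assert re.sub(r'^.*?-.*?\.', '', 'tosca-magnetFile.fieldmap.zip') == 'fieldmap.zip'
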
def app_file_list(simulation_type, simulation_id, file_type):
    res = []
    #TODO(pjm): keep files in folder by file_type instead of by extension
    search = '*.dat' if file_type == 'mirror' else '*.zip'
    d = simulation_db.simulation_lib_dir(simulation_type)
    for f in glob.glob(str(d.join(search))):
        if os.path.isfile(f):
            res.append(os.path.basename(f))
    res.sort()
    return json.dumps(res)

def get_application_data(data, **kwargs):
    #pkdp('get_application_data from {}', data)
    if 'method' not in data:
        raise RuntimeError('no application data method')
    if data.method not in _METHODS:
        raise RuntimeError('unknown application data method: {}'.format(
            data.method))
    g_id = -1
    try:
        with open(str(_dmp_file(data.simulationId)), 'rb') as f:
            b = f.read()
            g_id = radia_tk.load_bin(b)
    except IOError:
        # No Radia dump file
        return {}
    if data.method == 'get_field':
        f_type = data.get('fieldType')
        #pkdp('FT {}', f_type)
        if f_type in radia_tk.POINT_FIELD_TYPES:
            #TODO(mvk): won't work for subsets of available paths, figure that out
            pass
            #try:
            #    res = _read_data(data.simulationId, data.viewType, f_type)
            #except KeyError:
            #    res = None
            #pkdp('READ RES {}', res)
            #if res:
            #    v = [d.vectors.vertices for d in res.data if 'vectors' in d]
            #    old_pts = [p for a in v for p in a]
            #    new_pts = _build_field_points(data.fieldPaths)
            #    pkdp('CHECK FOR CHANGE OLD {} VS NEW {}', old_pts, new_pts)
            #    if len(old_pts) == len(new_pts) and numpy.allclose(new_pts, old_pts):
            #        return res
            #return _read_or_generate(g_id, data)
        return _generate_field_data(g_id, data.name, f_type, data.fieldPaths)
    if data.method == 'get_field_integrals':
        return _generate_field_integrals(g_id, data.fieldPaths)
    if data.method == 'get_geom':
        g_types = data.get('geomTypes', ['lines', 'polygons'])
        res = _read_or_generate(g_id, data)
        res.data = [{k: d[k] for k in d.keys() if k in g_types} for d in res.data]
        return res
    if data.method == 'save_field':
        #pkdp('DATA {}', data)
        data.method = 'get_field'
        res = get_application_data(data)
        if data.fileType == 'sdds':
            # we save individual field paths, so there will be one item in the list
            return _save_fm_sdds(
                res.name,
                res.data[0],
                simulation_db.simulation_lib_dir(SIM_TYPE).join(
                    data.simulationId + '_' + res.name + '.' + data.fileType))
        return res

def read_zip(stream, template=None):
    """Read zip file and store contents

    Args:
        stream (IO): file to read
        template (module): expected app

    Returns:
        dict: data
    """
    from pykern import pkcollections
    from sirepo import simulation_db
    from sirepo.template import template_common
    import py.path
    import re
    import zipfile

    tmp = simulation_db.tmp_dir()
    data = None
    zipped = pkcollections.Dict()
    with zipfile.ZipFile(stream, 'r') as z:
        for i in z.infolist():
            b = py.path.local(i.filename).basename
            c = z.read(i)
            if b.lower() == simulation_db.SIMULATION_DATA_FILE:
                assert not data, \
                    'too many db files {} in archive'.format(b)
                data = read_json(c, template)
                if not template:
                    import sirepo.template
                    template = sirepo.template.import_module(
                        data.simulationType)
                continue
            if re.match('__MACOSX', i.filename):
                continue
            #TODO(robnagler) ignore identical files hash
            assert not b in zipped, \
                '{} duplicate file in archive'.format(i.filename)
            fn = tmp.join(b)
            with open(str(fn), 'wb') as f:
                f.write(c)
            zipped[b] = fn
    assert data, \
        'missing {} in archive'.format(simulation_db.SIMULATION_DATA_FILE)
    needed = pkcollections.Dict()
    for n in template_common.lib_files(data):
        assert n.basename in zipped or n.check(file=True, exists=True), \
            'auxiliary file {} missing in archive'.format(n.basename)
        needed[n.basename] = n
    lib_d = simulation_db.simulation_lib_dir(template.SIM_TYPE)
    for b, src in zipped.items():
        if b in needed:
            src.copy(needed[b])
    return data

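# Hedged usage sketch for read_zip: callers typically hand it the uploaded
# file's stream (see api_importFile below). The archive must contain exactly
# one db file whose basename matches simulation_db.SIMULATION_DATA_FILE;
# the path here is illustrative.
with open('exported-simulation.zip', 'rb') as stream:
    data = read_zip(stream)
    print(data.simulationType)
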
def app_import_file(simulation_type):
    f = flask.request.files['file']
    error, data = sirepo.importer.import_python(
        f.read(),
        lib_dir=simulation_db.simulation_lib_dir(simulation_type),
        tmp_dir=simulation_db.tmp_dir(),
        user_filename=f.filename,
    )
    if error:
        return flask.jsonify({'error': error})
    return _save_new_and_reply(simulation_type, data)

def get_application_data(data):
    if data['method'] == 'column_info':
        data = pkcollections.Dict({
            'models': pkcollections.Dict({
                'analysisData': data['analysisData'],
            }),
        })
        return {
            'columnInfo': _column_info(
                _analysis_data_path(simulation_db.simulation_lib_dir(SIM_TYPE), data)),
        }
    assert False, 'unknown application_data method: {}'.format(data['method'])

def _generate_parameters_file(data):
    report = data.get('report', '')
    res, v = template_common.generate_parameters_file(data)
    sim_id = data.get('simulationId', data.models.simulation.simulationId)
    g = data.models.geometry
    v['dmpFile'] = _dmp_file(sim_id)
    if 'dmpImportFile' in data.models.simulation:
        v['dmpImportFile'] = simulation_db.simulation_lib_dir(SIM_TYPE).join(
            f'{_SCHEMA.constants.radiaDmpFileType}.{data.models.simulation.dmpImportFile}'
        )
    v['isExample'] = data.models.simulation.get('isExample', False)
    v.objects = g.get('objects', [])
    # read in h-m curves if applicable
    for o in v.objects:
        o.h_m_curve = _read_h_m_file(o.materialFile) if \
            o.get('material', None) and o.material == 'custom' and \
            o.get('materialFile', None) and o.materialFile else None
    v['geomName'] = g.name
    disp = data.models.magnetDisplay
    v_type = disp.viewType
    f_type = None
    if v_type not in VIEW_TYPES:
        raise ValueError('Invalid view {} ({})'.format(v_type, VIEW_TYPES))
    v['viewType'] = v_type
    v['dataFile'] = _geom_file(sim_id)
    if v_type == _SCHEMA.constants.viewTypeFields:
        f_type = disp.fieldType
        if f_type not in radia_tk.FIELD_TYPES:
            raise ValueError(
                'Invalid field {} ({})'.format(f_type, radia_tk.FIELD_TYPES)
            )
        v['fieldType'] = f_type
        v['fieldPoints'] = _build_field_points(data.models.fieldPaths.get('paths', []))
    if 'solver' in report:
        v['doSolve'] = True
        s = data.models.solver
        v['solvePrec'] = s.precision
        v['solveMaxIter'] = s.maxIterations
        v['solveMethod'] = s.method
    if 'reset' in report:
        radia_tk.reset()
        data.report = 'geometry'
        return _generate_parameters_file(data)
    v['h5ObjPath'] = _geom_h5_path(_SCHEMA.constants.viewTypeObjects)
    v['h5FieldPath'] = _geom_h5_path(_SCHEMA.constants.viewTypeFields, f_type)
    return template_common.render_jinja(
        SIM_TYPE,
        v,
        GEOM_PYTHON_FILE,
    )

def app_import_file(simulation_type):
    f = flask.request.files['file']
    arguments = str(flask.request.form['arguments'])
    pkdp('\n\tFile: {}\n\tArguments: {}', f.filename, arguments)
    error, data = sirepo.importer.import_python(
        f.read(),
        lib_dir=simulation_db.simulation_lib_dir(simulation_type),
        tmp_dir=simulation_db.tmp_dir(),
        user_filename=f.filename,
        arguments=arguments,
    )
    if error:
        return flask.jsonify({'error': error})
    return _save_new_and_reply(simulation_type, data)

def read_zip(stream, template=None):
    """Read zip file and store contents

    Args:
        stream (IO): file to read
        template (module): expected app

    Returns:
        dict: data
    """
    from pykern import pkcollections
    from sirepo import simulation_db
    from sirepo.template import template_common
    import py.path
    import zipfile

    tmp = simulation_db.tmp_dir()
    data = None
    zipped = pkcollections.Dict()
    with zipfile.ZipFile(stream, 'r') as z:
        for i in z.infolist():
            b = py.path.local(i.filename).basename
            c = z.read(i)
            if b.lower() == simulation_db.SIMULATION_DATA_FILE:
                assert not data, \
                    'too many db files {} in archive'.format(b)
                data = read_json(c, template)
                if not template:
                    import sirepo.template
                    template = sirepo.template.import_module(data.simulationType)
                continue
            #TODO(robnagler) ignore identical files hash
            assert not b in zipped, \
                '{} duplicate file in archive'.format(i.filename)
            fn = tmp.join(b)
            with open(str(fn), 'wb') as f:
                f.write(c)
            zipped[b] = fn
    assert data, \
        'missing {} in archive'.format(simulation_db.SIMULATION_DATA_FILE)
    needed = pkcollections.Dict()
    for n in template_common.lib_files(data):
        assert n.basename in zipped or n.check(file=True, exists=True), \
            'auxiliary file {} missing in archive'.format(n.basename)
        needed[n.basename] = n
    lib_d = simulation_db.simulation_lib_dir(template.SIM_TYPE)
    for b, src in zipped.items():
        if b in needed:
            src.copy(needed[b])
    return data

def import_file(req, tmp_dir=None, **kwargs):
    if not pkio.has_file_extension(req.filename, 'zip'):
        # fixed NameError: the original referenced an undefined 'filename'
        raise sirepo.util.UserAlert(
            'unsupported import filename: {}'.format(req.filename))
    #TODO(pjm): writing to simulation lib for now, tmp_dir will get removed after this request
    filepath = str(simulation_db.simulation_lib_dir(SIM_TYPE).join(_ZIP_FILE_NAME))
    pkio.mkdir_parent_only(filepath)
    with open(filepath, 'wb') as f:
        f.write(req.file_stream.read())
    data = simulation_db.default_data(SIM_TYPE)
    data['models']['simulation']['name'] = req.filename
    data['models']['simulation'][_TMP_INPUT_FILE_FIELD] = filepath
    # more processing occurs in prepare_for_client() via:
    # import_file => _save_new_and_reply => api_simulationData => prepare_for_client
    return data

def lib_files(data, source_lib=None):
    """Return list of files used by the simulation

    Args:
        data (dict): sim db
        source_lib (py.path): source directory (default: simulation lib dir)

    Returns:
        list: py.path.local to files
    """
    from sirepo import simulation_db

    sim_type = data.simulationType
    return sirepo.template.import_module(data).lib_files(
        data,
        source_lib or simulation_db.simulation_lib_dir(sim_type),
    )

def _link_or_unlink_proprietary_files(sim_type, should_link):
    d = proprietary_code_dir(sim_type)
    for e in simulation_db.examples(sim_type):
        b = sim_data.get_class(sim_type).proprietary_lib_file_basename(e)
        p = simulation_db.simulation_lib_dir(sim_type).join(b)
        if not should_link:
            pkio.unchecked_remove(p)
            continue
        try:
            p.mksymlinkto(
                d.join(b),
                absolute=False,
            )
        except py.error.EEXIST:
            pass

def api_importFile(simulation_type=None):
    """
    Args:
        simulation_type (str): which simulation type

    Params:
        file: file data
        folder: where to import to
    """
    import sirepo.importer

    error = None
    f = None
    try:
        template = simulation_type and sirepo.template.import_module(
            simulation_type)
        f = flask.request.files.get('file')
        assert f, \
            ValueError('must supply a file')
        if pkio.has_file_extension(f.filename, 'json'):
            data = sirepo.importer.read_json(f.read(), template)
        #TODO(pjm): need a separate URI interface to importer, added exception for rs4pi for now
        # (dicom input is normally a zip file)
        elif pkio.has_file_extension(f.filename, 'zip') and simulation_type != 'rs4pi':
            data = sirepo.importer.read_zip(f.stream, template)
        else:
            assert simulation_type, \
                'simulation_type is required param for non-zip|json imports'
            assert hasattr(template, 'import_file'), \
                ValueError('Only zip files are supported')
            data = template.import_file(
                flask.request,
                simulation_db.simulation_lib_dir(simulation_type),
                simulation_db.tmp_dir(),
            )
        #TODO(robnagler) need to validate folder
        data.models.simulation.folder = flask.request.form['folder']
        data.models.simulation.isExample = False
        return _save_new_and_reply(data)
    except Exception as e:
        pkdlog('{}: exception: {}', f and f.filename, pkdexc())
        error = str(e.message) if hasattr(e, 'message') else str(e)
    return http_reply.gen_json({
        'error': error if error else 'An unknown error occurred',
    })

def _lib_file_list(cls, pat, want_user_lib_dir=True):
    """Unsorted list of absolute paths matching glob pat

    Only works locally.
    """
    cls._assert_server_side()
    from sirepo import simulation_db

    res = PKDict()
    x = [cls.lib_file_resource_dir()]
    if want_user_lib_dir:
        # lib_dir overwrites resource_dir
        x.append(simulation_db.simulation_lib_dir(cls.sim_type()))
    for d in x:
        for f in pkio.sorted_glob(d.join(pat)):
            res[f.basename] = f
    return res.values()

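# The PKDict keyed by basename implements the "lib_dir overwrites
# resource_dir" comment above: directories visited later win on name
# collisions. A minimal standalone sketch of that merge order:
from pykern.pkcollections import PKDict

res = PKDict()
for d in ({'a.dat': '/resource/a.dat'}, {'a.dat': '/lib/a.dat'}):
    res.update(d)
assert res['a.dat'] == '/lib/a.dat'
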
def lib_files_from_other_user(cls, data, other_lib_dir):
    """Copy auxiliary files to other user

    Does not copy resource files. Only works locally.

    Args:
        data (dict): simulation db
        other_lib_dir (py.path): source directory
    """
    cls._assert_server_side()
    from sirepo import simulation_db

    t = simulation_db.simulation_lib_dir(cls.sim_type())
    for f in cls._lib_file_basenames(data):
        s = other_lib_dir.join(f)
        if s.exists():
            s.copy(t.join(f))

def _cols_with_non_unique_values(filename, has_header_row, header):
    # TODO(e-carlin): support npy
    assert not re.search(r'\.npy$', str(filename)), \
        f'numpy files are not supported path={filename}'
    v = np.genfromtxt(
        str(simulation_db.simulation_lib_dir(SIM_TYPE).join(
            _filename(filename),
        )),
        delimiter=',',
        skip_header=True,
    )
    res = PKDict()
    for i, c in enumerate(np.all(v == v[0, :], axis=0)):
        if not c:
            continue
        res[header[i]] = True
    return res

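# The np.all(v == v[0, :], axis=0) expression above flags columns whose
# every entry equals the first row's value, i.e. columns containing a
# single repeated value. Standalone demonstration:
import numpy as np

v = np.array([
    [1.0, 5.0],
    [2.0, 5.0],
    [3.0, 5.0],
])
assert list(np.all(v == v[0, :], axis=0)) == [False, True]
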
def _start_simulation(data, run_async=False):
    """Setup and start the simulation.

    Args:
        data (dict): app data
        run_async (bool): run-background or run

    Returns:
        object: _Command or daemon instance
    """
    run_dir = simulation_db.simulation_run_dir(data, remove_dir=True)
    pkio.mkdir_parent(run_dir)
    #TODO(robnagler) create a lock_dir -- what node/pid/thread to use?
    #   probably can only do with celery.
    simulation_type = data['simulationType']
    sid = simulation_db.parse_sid(data)
    data = simulation_db.fixup_old_data(simulation_type, data)
    assert simulation_type in simulation_db.APP_NAMES, \
        '{}: invalid simulation type'.format(simulation_type)
    template = sirepo.template.import_module(simulation_type)
    for d in simulation_db.simulation_dir(
            simulation_type, sid), simulation_db.simulation_lib_dir(simulation_type):
        for f in glob.glob(str(d.join('*.*'))):
            if os.path.isfile(f):
                py.path.local(f).copy(run_dir)
    template.prepare_aux_files(run_dir, data)
    simulation_db.save_simulation_json(simulation_type, data)
    with open(str(run_dir.join('in{}'.format(simulation_db.JSON_SUFFIX))), 'w') as outfile:
        json.dump(data, outfile)
    pkio.write_text(
        run_dir.join(simulation_type + '_parameters.py'),
        template.generate_parameters_file(
            data,
            _schema_cache(simulation_type),
            run_dir=run_dir,
            run_async=run_async,
        ))
    cmd = [_ROOT_CMD, simulation_type] \
        + ['run-background' if run_async else 'run'] + [str(run_dir)]
    if run_async:
        return cfg.job_queue(sid, run_dir, cmd)
    return _Command(cmd, cfg.foreground_time_limit)

def app_file_list(simulation_type, simulation_id, file_type):
    file_type = werkzeug.secure_filename(file_type)
    res = []
    #TODO(pjm): use file prefixes for srw, currently assumes mirror is *.dat and others are *.zip
    if simulation_type == 'srw':
        search = ['*.dat', '*.txt'] if file_type == 'mirror' else ['*.zip']
    else:
        search = ['{}.*'.format(file_type)]
    d = simulation_db.simulation_lib_dir(simulation_type)
    for extension in search:
        for f in glob.glob(str(d.join(extension))):
            if os.path.isfile(f):
                filename = os.path.basename(f)
                if not simulation_type == 'srw':
                    # strip the file_type prefix
                    filename = filename[len(file_type) + 1:]
                res.append(filename)
    res.sort()
    return _json_response(res)

def _start_simulation(data, run_async=False):
    """Setup and start the simulation.

    Args:
        data (dict): app data
        run_async (bool): run-background or run

    Returns:
        object: _Command or daemon instance
    """
    run_dir = simulation_db.simulation_run_dir(data, remove_dir=True)
    pkio.mkdir_parent(run_dir)
    #TODO(robnagler) create a lock_dir -- what node/pid/thread to use?
    #   probably can only do with celery.
    simulation_type = data['simulationType']
    sid = simulation_db.parse_sid(data)
    data = simulation_db.fixup_old_data(simulation_type, data)
    assert simulation_type in simulation_db.APP_NAMES, \
        '{}: invalid simulation type'.format(simulation_type)
    template = sirepo.template.import_module(simulation_type)
    simulation_db.save_simulation_json(simulation_type, data)
    for d in simulation_db.simulation_dir(simulation_type, sid), simulation_db.simulation_lib_dir(simulation_type):
        for f in glob.glob(str(d.join('*.*'))):
            if os.path.isfile(f):
                py.path.local(f).copy(run_dir)
    with open(str(run_dir.join('in{}'.format(simulation_db.JSON_SUFFIX))), 'w') as outfile:
        json.dump(data, outfile)
    pkio.write_text(
        run_dir.join(simulation_type + '_parameters.py'),
        template.generate_parameters_file(
            data,
            _schema_cache(simulation_type),
            run_dir=run_dir,
            run_async=run_async,
        )
    )
    cmd = [_ROOT_CMD, simulation_type] \
        + ['run-background' if run_async else 'run'] + [str(run_dir)]
    if run_async:
        return cfg.job_queue(sid, run_dir, cmd)
    return _Command(cmd, cfg.foreground_time_limit)

def get_application_data(data):
    if data['method'] == 'column_info':
        data = pkcollections.Dict({
            'models': pkcollections.Dict({
                'analysisData': data['analysisData'],
            }),
        })
        return {
            'columnInfo': _column_info(
                _analysis_data_path(simulation_db.simulation_lib_dir(SIM_TYPE), data)),
        }
    if data['method'] == 'update_kicker':
        return _update_epics_kicker(data)
    if data['method'] == 'read_kickers':
        return _read_epics_kickers(data)
    if data['method'] == 'enable_steering':
        return _enable_steering(data)
    assert False, 'unknown application_data method: {}'.format(data['method'])

def _tosca_info(data):
    # determine the list of available files (from zip if necessary)
    # compute the tosca length from datafile
    tosca = data['tosca']
    #TODO(pjm): keep a cache on the tosca model?
    datafile = simulation_db.simulation_lib_dir(SIM_TYPE).join(
        template_common.lib_file_name('TOSCA', 'magnetFile', tosca['magnetFile']))
    if not datafile.exists():
        return {
            'error': 'missing or invalid file: {}'.format(tosca['magnetFile']),
        }
    error = None
    length = None
    if _is_zip_file(datafile):
        with zipfile.ZipFile(str(datafile), 'r') as z:
            filenames = []
            if 'fileNames' not in tosca or not tosca['fileNames']:
                tosca['fileNames'] = []
            for info in z.infolist():
                filenames.append(info.filename)
                if not length and info.filename in tosca['fileNames']:
                    length, error = _tosca_length(tosca, z.read(info).splitlines())
                    if length:
                        error = None
    else:
        filenames = [tosca['magnetFile']]
        with pkio.open_text(str(datafile)) as f:
            length, error = _tosca_length(tosca, f)
    if error:
        return {'error': error}
    return {
        'toscaInfo': {
            'toscaLength': length,
            'fileList': sorted(filenames) if filenames else None,
            'magnetFile': tosca['magnetFile'],
        },
    }

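# _is_zip_file is not shown in this snippet; a minimal implementation might
# simply wrap the stdlib check (an assumption, not necessarily the actual
# helper):
import zipfile

def _is_zip_file(path):
    return zipfile.is_zipfile(str(path))
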
def app_upload_file(simulation_type, simulation_id):
    f = flask.request.files['file']
    lib = simulation_db.simulation_lib_dir(simulation_type)
    filename = werkzeug.secure_filename(f.filename)
    p = lib.join(filename)
    err = None
    if p.check():
        err = 'file exists: {}'.format(filename)
    if not err:
        f.save(str(p))
        err = _validate_data_file(p)
        if err:
            pkio.unchecked_remove(p)
    if err:
        return flask.jsonify({
            'error': err,
            'filename': filename,
            'simulationId': simulation_id,
        })
    return flask.jsonify({
        'filename': filename,
        'simulationId': simulation_id,
    })

def api_importFile(simulation_type=None):
    """
    Args:
        simulation_type (str): which simulation type

    Params:
        file: file data
        folder: where to import to
    """
    import sirepo.importer

    error = None
    f = None
    try:
        template = simulation_type and sirepo.template.import_module(simulation_type)
        f = flask.request.files.get('file')
        assert f, \
            ValueError('must supply a file')
        if pkio.has_file_extension(f.filename, 'json'):
            data = sirepo.importer.read_json(f.read(), template)
        elif pkio.has_file_extension(f.filename, 'zip'):
            data = sirepo.importer.read_zip(f.stream, template)
        else:
            assert simulation_type, \
                'simulation_type is required param for non-zip|json imports'
            data = template.import_file(
                flask.request,
                simulation_db.simulation_lib_dir(simulation_type),
                simulation_db.tmp_dir(),
            )
        #TODO(robnagler) need to validate folder
        data.models.simulation.folder = flask.request.form['folder']
        return _save_new_and_reply(data.simulationType, data)
    except Exception as e:
        pkdlog('{}: exception: {}', f and f.filename, pkdexc())
        error = e.message if hasattr(e, 'message') else str(e)
    return _json_response({'error': error})

def prepare_aux_files(run_dir, data):
    _copy_lib_files(data, simulation_db.simulation_lib_dir(_SIMULATION_TYPE), run_dir)

def app_download_file(simulation_type, simulation_id, filename):
    lib = simulation_db.simulation_lib_dir(simulation_type)
    p = lib.join(werkzeug.secure_filename(filename))
    return flask.send_file(str(p), as_attachment=True, attachment_filename=filename)

def _lib_filepath(simulation_type, filename, file_type):
    lib = simulation_db.simulation_lib_dir(simulation_type)
    return lib.join(_lib_filename(simulation_type, filename, file_type))