def api_updateFolder():
    """Rename a simulation folder and update every simulation stored under it.

    Reads ``oldName`` and ``newName`` from the POSTed request data. Renaming
    the root folder ("/") in either direction is rejected. Every simulation
    whose folder equals the old name (case-insensitively) or lies beneath it
    is rewritten and re-saved.

    Returns:
        standard JSON "ok" reply

    Raises:
        sirepo.util.Error: when oldName or newName resolves to "/"
    """
    #TODO(robnagler) Folder should have a serial, or should it be on data
    req = http_request.parse_post()
    o = srschema.parse_folder(req.req_data['oldName'])
    if o == '/':
        raise sirepo.util.Error(
            'cannot rename root ("/") folder',
            'old folder is root req={}',
            req,
        )
    n = srschema.parse_folder(req.req_data['newName'])
    if n == '/':
        raise sirepo.util.Error(
            'cannot rename folder to root ("/")',
            'new folder is root req={}',
            req,
        )
    for r in simulation_db.iterate_simulation_datafiles(
        req.type, _simulation_data_iterator):
        f = r.models.simulation.folder
        if f.lower() == o.lower():
            # exact match: move the simulation itself to the new folder
            r.models.simulation.folder = n
        elif f.lower().startswith(o.lower() + '/'):
            # descendant folder: replace the old prefix, keep the subpath.
            # BUG FIX: original read ``f[len():]`` -- len() with no argument
            # raises TypeError at runtime; slice from len(o) instead.
            # (Also dropped the unused local ``l = o.lower()``.)
            r.models.simulation.folder = n + f[len(o):]
        else:
            continue
        simulation_db.save_simulation_json(r)
    return http_reply.gen_json_ok()
def app_run_cancel():
    """Mark the posted simulation as canceled and stop its background job.

    Persists the 'canceled' state, kills the queued job, and removes the
    last (possibly partial) frame file from the run directory.

    Returns:
        str: an empty JSON object
    """
    req = _json_input()
    sim_type = req['simulationType']
    req['models']['simulationStatus']['state'] = 'canceled'
    simulation_db.save_simulation_json(sim_type, req)
    cfg.job_queue.kill(simulation_db.parse_sid(req))
    # the last frame file may not be finished, remove it
    template = sirepo.template.import_module(sim_type)
    template.remove_last_frame(simulation_db.simulation_run_dir(req))
    return '{}'
def app_update_folder():
    """Rename a simulation folder and update every simulation stored under it.

    Reads ``oldName`` and ``newName`` from the parsed request data and
    re-saves each simulation whose folder equals the old name or lies
    beneath it.

    Returns:
        the standard JSON "ok" reply
    """
    #TODO(robnagler) Folder should have a serial, or should it be on data
    data = _parse_data_input()
    old_name = data['oldName']
    new_name = data['newName']
    prefix = old_name + '/'
    for row in simulation_db.iterate_simulation_datafiles(data['simulationType'], _simulation_data):
        folder = row['models']['simulation']['folder']
        # BUG FIX: the original ``folder.startswith(old_name)`` also matched
        # sibling folders sharing the prefix (renaming "/a" relabeled "/ab"),
        # and ``re.sub`` treated new_name as a replacement template, so
        # backslashes in it were misinterpreted. Match exactly or on a "/"
        # boundary and splice with plain string slicing instead.
        if folder == old_name:
            row['models']['simulation']['folder'] = new_name
            simulation_db.save_simulation_json(data['simulationType'], row)
        elif folder.startswith(prefix):
            row['models']['simulation']['folder'] = new_name + folder[len(old_name):]
            simulation_db.save_simulation_json(data['simulationType'], row)
    return _json_response_ok()
def api_updateFolder():
    """Rename a simulation folder and update every simulation stored under it.

    Reads ``oldName`` and ``newName`` from the parsed request data and
    re-saves each simulation whose folder equals the old name or lies
    beneath it.

    Returns:
        standard JSON "ok" reply
    """
    #TODO(robnagler) Folder should have a serial, or should it be on data
    data = _parse_data_input()
    old_name = data['oldName']
    new_name = data['newName']
    prefix = old_name + '/'
    for row in simulation_db.iterate_simulation_datafiles(data['simulationType'], _simulation_data):
        folder = row['models']['simulation']['folder']
        # BUG FIX: the original ``folder.startswith(old_name)`` also matched
        # sibling folders sharing the prefix (renaming "/a" relabeled "/ab"),
        # and ``re.sub`` treated new_name as a replacement template, so
        # backslashes in it were misinterpreted. Match exactly or on a "/"
        # boundary and splice with plain string slicing instead.
        if folder == old_name:
            row['models']['simulation']['folder'] = new_name
            simulation_db.save_simulation_json(row)
        elif folder.startswith(prefix):
            row['models']['simulation']['folder'] = new_name + folder[len(old_name):]
            simulation_db.save_simulation_json(row)
    return http_reply.gen_json_ok()
def _move_import_file(data):
    """Move the uploaded zip into the simulation dir and summarize its DICOM files.

    Args:
        data (dict): simulation data; ``models.simulation`` carries the
            temporary upload path under ``_TMP_INPUT_FILE_FIELD``, which is
            removed here before the data is saved
    """
    sim = data['models']['simulation']
    path = sim[_TMP_INPUT_FILE_FIELD]
    del sim[_TMP_INPUT_FILE_FIELD]
    if os.path.exists(path):
        zip_path = _sim_file(sim['simulationId'], _ZIP_FILE_NAME)
        os.rename(path, zip_path)
        # BUG FIX: the temporary directory that held the upload was left
        # behind after the rename; remove it so uploads don't leak.
        pkio.unchecked_remove(os.path.dirname(path))
        tmp_dir = _sim_file(sim['simulationId'], _TMP_ZIP_DIR)
        zipfile.ZipFile(zip_path).extractall(tmp_dir)
        _summarize_dicom_files(data, tmp_dir)
        pkio.unchecked_remove(tmp_dir)
        simulation_db.save_simulation_json(data)
def _move_import_file(data):
    """Move the uploaded zip into the simulation dir and summarize its DICOM files.

    Removes the temporary upload field from ``models.simulation``, relocates
    the uploaded file, extracts it, summarizes the contents, cleans up the
    scratch directories, and saves the simulation data. A no-op (beyond the
    field removal) when the upload path no longer exists.

    Args:
        data (dict): simulation data with the temporary upload path stored
            under ``_TMP_INPUT_FILE_FIELD``
    """
    sim = data['models']['simulation']
    src = sim[_TMP_INPUT_FILE_FIELD]
    del sim[_TMP_INPUT_FILE_FIELD]
    if not os.path.exists(src):
        return
    dst = _sim_file(sim['simulationId'], _ZIP_FILE_NAME)
    os.rename(src, dst)
    # the upload's temporary directory is no longer needed
    pkio.unchecked_remove(os.path.dirname(src))
    extract_dir = _sim_file(sim['simulationId'], _TMP_ZIP_DIR)
    zipfile.ZipFile(dst).extractall(extract_dir)
    _summarize_dicom_files(data, extract_dir)
    pkio.unchecked_remove(extract_dir)
    simulation_db.save_simulation_json(data)
def _start_simulation(data, run_async=False):
    """Setup and start the simulation.

    Args:
        data (dict): app data
        run_async (bool): run-background or run

    Returns:
        object: _Command or daemon instance
    """
    # start from a fresh run directory every time
    run_dir = simulation_db.simulation_run_dir(data, remove_dir=True)
    pkio.mkdir_parent(run_dir)
    #TODO(robnagler) create a lock_dir -- what node/pid/thread to use?
    #   probably can only do with celery.
    simulation_type = data['simulationType']
    sid = simulation_db.parse_sid(data)
    # migrate older data formats before anything is written out
    data = simulation_db.fixup_old_data(simulation_type, data)
    assert simulation_type in simulation_db.APP_NAMES, \
        '{}: invalid simulation type'.format(simulation_type)
    template = sirepo.template.import_module(simulation_type)
    # stage inputs: copy files from the per-simulation dir and the shared
    # lib dir into the run directory
    for d in simulation_db.simulation_dir(
        simulation_type, sid), simulation_db.simulation_lib_dir(simulation_type):
        for f in glob.glob(str(d.join('*.*'))):
            if os.path.isfile(f):
                py.path.local(f).copy(run_dir)
    template.prepare_aux_files(run_dir, data)
    # persist the (fixed-up) data, then snapshot it into the run dir as the
    # job's input file
    simulation_db.save_simulation_json(simulation_type, data)
    with open(str(run_dir.join('in{}'.format(simulation_db.JSON_SUFFIX))), 'w') as outfile:
        json.dump(data, outfile)
    # generate the template-specific parameters script the job will execute
    pkio.write_text(
        run_dir.join(simulation_type + '_parameters.py'),
        template.generate_parameters_file(
            data,
            _schema_cache(simulation_type),
            run_dir=run_dir,
            run_async=run_async,
        ))
    cmd = [_ROOT_CMD, simulation_type] \
        + ['run-background' if run_async else 'run'] + [str(run_dir)]
    if run_async:
        # hand off to the background job queue
        return cfg.job_queue(sid, run_dir, cmd)
    # foreground run with a time limit
    return _Command(cmd, cfg.foreground_time_limit)
def _start_simulation(data, run_async=False):
    """Setup and start the simulation.

    Args:
        data (dict): app data
        run_async (bool): run-background or run

    Returns:
        object: _Command or daemon instance
    """
    run_dir = simulation_db.simulation_run_dir(data, remove_dir=True)
    pkio.mkdir_parent(run_dir)
    #TODO(robnagler) create a lock_dir -- what node/pid/thread to use?
    #   probably can only do with celery.
    simulation_type = data['simulationType']
    sid = simulation_db.parse_sid(data)
    data = simulation_db.fixup_old_data(simulation_type, data)
    assert simulation_type in simulation_db.APP_NAMES, \
        '{}: invalid simulation type'.format(simulation_type)
    template = sirepo.template.import_module(simulation_type)
    # persist the fixed-up data before staging the run directory
    simulation_db.save_simulation_json(simulation_type, data)
    # stage input files: everything from the per-simulation dir and the
    # shared lib dir is copied into the run directory
    source_dirs = (
        simulation_db.simulation_dir(simulation_type, sid),
        simulation_db.simulation_lib_dir(simulation_type),
    )
    for src_dir in source_dirs:
        for name in glob.glob(str(src_dir.join('*.*'))):
            if os.path.isfile(name):
                py.path.local(name).copy(run_dir)
    # snapshot the job input alongside the generated parameters script
    in_path = run_dir.join('in{}'.format(simulation_db.JSON_SUFFIX))
    with open(str(in_path), 'w') as fh:
        json.dump(data, fh)
    pkio.write_text(
        run_dir.join(simulation_type + '_parameters.py'),
        template.generate_parameters_file(
            data,
            _schema_cache(simulation_type),
            run_dir=run_dir,
            run_async=run_async,
        )
    )
    mode = 'run-background' if run_async else 'run'
    cmd = [_ROOT_CMD, simulation_type, mode, str(run_dir)]
    if run_async:
        return cfg.job_queue(sid, run_dir, cmd)
    return _Command(cmd, cfg.foreground_time_limit)
def api_saveSimulationData():
    """Validate, fix up, and persist posted simulation data.

    Serial validation runs on the raw request data, before any fixup, so
    stale clients are rejected against exactly what they sent.

    Returns:
        the api_simulationData reply for the freshly saved simulation
    """
    # do not fixup_old_data yet
    req = http_request.parse_post(id=True, template=True)
    data = req.req_data
    simulation_db.validate_serial(data)
    data = simulation_db.fixup_old_data(data)[0]
    template = req.template
    if hasattr(template, 'prepare_for_save'):
        data = template.prepare_for_save(data)
    data = simulation_db.save_simulation_json(data)
    return api_simulationData(data.simulationType, data.models.simulation.simulationId)
def app_run_status():
    """Report the run state and progress of the posted simulation.

    While the job queue still shows the sid as running, progress is computed
    live. Otherwise the persisted data is reloaded; a stale 'running' state
    is resolved to 'completed' or 'canceled' (based on whether all frames
    were produced) and written back.

    Returns:
        flask.Response: JSON with state, completion percentage, frame
        counts, last frame id, and elapsed time
    """
    data = _json_input()
    sid = simulation_db.parse_sid(data)
    simulation_type = data['simulationType']
    template = sirepo.template.import_module(simulation_type)
    run_dir = simulation_db.simulation_run_dir(data)
    if cfg.job_queue.is_running(sid):
        completion = template.background_percent_complete(data, run_dir, True)
        state = 'running'
    else:
        # not queued: trust the persisted state, not the posted data
        data = simulation_db.open_json_file(simulation_type, sid=sid)
        state = data['models']['simulationStatus']['state']
        completion = template.background_percent_complete(data, run_dir, False)
        if state == 'running':
            # stale 'running': the job is gone, decide how it ended
            if completion['frame_count'] == completion['total_frames']:
                state = 'completed'
            else:
                state = 'canceled'
            data['models']['simulationStatus']['state'] = state
            simulation_db.save_simulation_json(data['simulationType'], data)
    frame_id = ''
    elapsed_time = ''
    if 'last_update_time' in completion:
        # elapsed time is measured from the persisted start time to the
        # most recent frame update
        frame_id = completion['last_update_time']
        elapsed_time = int(frame_id) - int(
            data['models']['simulationStatus']['startTime'])
    return flask.jsonify({
        'state': state,
        'percentComplete': completion['percent_complete'],
        'frameCount': completion['frame_count'],
        'totalFrames': completion['total_frames'],
        'frameId': frame_id,
        'elapsedTime': elapsed_time,
    })
def app_save_simulation_data():
    """Save posted simulation data after serial validation.

    Returns:
        the app_simulation_data reply for the saved simulation, or the
        serial-validation error response when the posted serial is stale
    """
    data = _parse_data_input(validate=True)
    res = _validate_serial(data)
    if res:
        return res
    simulation_type = data['simulationType']
    template = sirepo.template.import_module(simulation_type)
    # BUG FIX: not every template module defines prepare_for_save; calling
    # it unconditionally raised AttributeError for those apps. Guard with
    # hasattr instead.
    if hasattr(template, 'prepare_for_save'):
        data = template.prepare_for_save(data)
    data = simulation_db.save_simulation_json(simulation_type, data)
    return app_simulation_data(
        data['simulationType'],
        data['models']['simulation']['simulationId'],
        pretty=False,
    )
def api_saveSimulationData():
    """Persist posted simulation data and return the saved copy.

    Returns:
        the api_simulationData reply for the saved simulation, or the
        serial-validation error response when the posted serial is stale
    """
    req_data = _parse_data_input(validate=True)
    err = _validate_serial(req_data)
    if err:
        return err
    sim_type = req_data['simulationType']
    module = sirepo.template.import_module(sim_type)
    # templates may optionally hook the save
    if hasattr(module, 'prepare_for_save'):
        req_data = module.prepare_for_save(req_data)
    saved = simulation_db.save_simulation_json(req_data)
    sim = saved['models']['simulation']
    return api_simulationData(saved['simulationType'], sim['simulationId'], pretty=False)
def app_run_status():
    """Report the run state and progress of the posted simulation.

    While the job queue still shows the sid as running, progress is computed
    live. Otherwise the persisted data is reloaded; a stale 'running' state
    is resolved to 'completed' or 'canceled' (based on whether all frames
    were produced) and written back.

    Returns:
        flask.Response: JSON with state, completion percentage, frame
        counts, last frame id, and elapsed time
    """
    data = _json_input()
    sid = simulation_db.parse_sid(data)
    sim_type = data['simulationType']
    template = sirepo.template.import_module(sim_type)
    run_dir = simulation_db.simulation_run_dir(data)
    if cfg.job_queue.is_running(sid):
        state = 'running'
        progress = template.background_percent_complete(data, run_dir, True)
    else:
        # not queued: trust the persisted state, not the posted data
        data = simulation_db.open_json_file(sim_type, sid=sid)
        state = data['models']['simulationStatus']['state']
        progress = template.background_percent_complete(data, run_dir, False)
        if state == 'running':
            # stale 'running': the job is gone, decide how it ended
            state = 'completed' if progress['frame_count'] == progress['total_frames'] else 'canceled'
            data['models']['simulationStatus']['state'] = state
            simulation_db.save_simulation_json(data['simulationType'], data)
    frame_id = ''
    elapsed_time = ''
    if 'last_update_time' in progress:
        frame_id = progress['last_update_time']
        elapsed_time = int(frame_id) - int(data['models']['simulationStatus']['startTime'])
    return flask.jsonify({
        'state': state,
        'percentComplete': progress['percent_complete'],
        'frameCount': progress['frame_count'],
        'totalFrames': progress['total_frames'],
        'frameId': frame_id,
        'elapsedTime': elapsed_time,
    })