def api_runCancel():
    data = _parse_data_input()
    jid = simulation_db.job_id(data)
    if feature_config.cfg.runner_daemon:
        jhash = template_common.report_parameters_hash(data)
        run_dir = simulation_db.simulation_run_dir(data)
        runner_client.cancel_report_job(run_dir, jhash)
        # Always true from the client's perspective
        return http_reply.gen_json({'state': 'canceled'})
    else:
        # TODO(robnagler) need to have a way of listing jobs
        # Don't bother with cache_hit check. We don't have any way of canceling
        # if the parameters don't match so for now, always kill.
        #TODO(robnagler) mutex required
        if runner.job_is_processing(jid):
            run_dir = simulation_db.simulation_run_dir(data)
            # Write first, since results are write once, and we want to
            # indicate the cancel instead of the termination error that
            # will happen as a result of the kill.
            simulation_db.write_result({'state': 'canceled'}, run_dir=run_dir)
            runner.job_kill(jid)
            # TODO(robnagler) should really be inside the template (t.cancel_simulation()?)
            # the last frame file may not be finished, remove it
            t = sirepo.template.import_module(data)
            if hasattr(t, 'remove_last_frame'):
                t.remove_last_frame(run_dir)
        # Always true from the client's perspective
        return http_reply.gen_json({'state': 'canceled'})
def compute_field_range(args, compute_range):
    """ Computes the fieldRange values for all parameters across all animation files.
    Caches the value on the animation input file. compute_range() is called to read
    the simulation specific datafiles and extract the ranges by field.
    """
    from sirepo import simulation_db

    run_dir = simulation_db.simulation_run_dir(
        PKDict(
            simulationType=args['simulationType'],
            simulationId=args['simulationId'],
            report='animation',
        ))
    data = simulation_db.read_json(run_dir.join(INPUT_BASE_NAME))
    res = None
    model_name = args['modelName']
    if model_name in data.models:
        if 'fieldRange' in data.models[model_name]:
            res = data.models[model_name].fieldRange
        else:
            #TODO(pjm): second arg was never used
            res = compute_range(run_dir, None)
            data.models[model_name].fieldRange = res
            simulation_db.write_json(run_dir.join(INPUT_BASE_NAME), data)
    return PKDict(fieldRange=res)
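# A usage sketch for compute_field_range above. The callback body and the
# argument values are hypothetical; in practice each simulation type supplies
# its own compute_range that scans the output files in run_dir.
def _example_compute_range(run_dir, data):
    # hypothetical result shape: field name -> [min, max] across all files
    return PKDict(x=[-0.5, 0.5], y=[-0.1, 0.1])

# compute_field_range(
#     {
#         'simulationType': 'elegant',       # hypothetical
#         'simulationId': 'abc123',          # hypothetical
#         'modelName': 'particleAnimation',  # hypothetical
#     },
#     _example_compute_range,
# )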
def _request_content(kwargs):
    d = kwargs.pkdel('req_data')
    if not d:
        #TODO(robnagler) need to use parsed values, ok for now, because none
        # of the used values are modified by parse_post. If we have files
        # (e.g. file_type, filename), we need to use those values from parse_post
        d = sirepo.http_request.parse_post(
            fixup_old_data=kwargs.pkdel('fixup_old_data', False),
            id=True,
            model=True,
            check_sim_exists=True,
        ).req_data
    s = sirepo.sim_data.get_class(d)
    #TODO(robnagler) this should be req_data
    b = PKDict(data=d, **kwargs)
    # TODO(e-carlin): some of these fields are only used for some types of reqs
    b.pksetdefault(
        analysisModel=lambda: s.parse_model(d),
        computeJobHash=lambda: d.get('computeJobHash') or s.compute_job_hash(d),
        computeJobSerial=lambda: d.get('computeJobSerial', 0),
        computeModel=lambda: s.compute_model(d),
        isParallel=lambda: s.is_parallel(d),
        #TODO(robnagler) relative to srdb root
        simulationId=lambda: s.parse_sid(d),
        simulationType=lambda: d.simulationType,
    ).pkupdate(
        reqId=sirepo.job.unique_key(),
        runDir=str(simulation_db.simulation_run_dir(d)),
        uid=sirepo.auth.logged_in_user(),
    ).pkupdate(
        computeJid=s.parse_jid(d, uid=b.uid),
        userDir=str(sirepo.simulation_db.user_path(b.uid)),
    )
    return _run_mode(b)
def api_runCancel():
    jid = None
    try:
        req = http_request.parse_post(id=True, model=True, check_sim_exists=True)
        jid = req.sim_data.parse_jid(req.req_data)
        # TODO(robnagler) need to have a way of listing jobs
        # Don't bother with cache_hit check. We don't have any way of canceling
        # if the parameters don't match so for now, always kill.
        #TODO(robnagler) mutex required
        if runner.job_is_processing(jid):
            run_dir = simulation_db.simulation_run_dir(req.req_data)
            # Write first, since results are write once, and we want to
            # indicate the cancel instead of the termination error that
            # will happen as a result of the kill.
            try:
                simulation_db.write_result({'state': 'canceled'}, run_dir=run_dir)
            except Exception as e:
                if not pykern.pkio.exception_is_not_found(e):
                    raise
                # else: run_dir may have been deleted
            runner.job_kill(jid)
            # TODO(robnagler) should really be inside the template (t.cancel_simulation()?)
            # the last frame file may not be finished, remove it
            t = sirepo.template.import_module(req.req_data)
            if hasattr(t, 'remove_last_frame'):
                t.remove_last_frame(run_dir)
    except Exception as e:
        pkdlog('ignoring exception={} jid={} stack={}', e, jid, pkdexc())
    # Always true from the client's perspective
    return http_reply.gen_json({'state': 'canceled'})
def get_application_data(data):
    if data['method'] == 'get_elegant_sim_list':
        res = []
        for f in pkio.sorted_glob(_elegant_dir().join('*/', _ELEGANT_TWISS_PATH)):
            m = re.match(r'.*?/elegant/(.*?)/animation', str(f))
            if not m:
                continue
            id = m.group(1)
            name = simulation_db.read_json(
                _elegant_dir().join(id, '/', simulation_db.SIMULATION_DATA_FILE),
            ).models.simulation.name
            res.append({
                'simulationId': id,
                'name': name,
            })
        return {
            'simList': res,
        }
    elif data['method'] == 'compute_particle_ranges':
        run_dir = simulation_db.simulation_run_dir({
            'simulationType': SIM_TYPE,
            'simulationId': data['simulationId'],
            'report': 'animation',
        })
        return {
            'fieldRange': _compute_range_across_files(run_dir),
        }
def api_simulationFrame(frame_id):
    #TODO(robnagler) startTime is reportParametersHash; need version on URL and/or param names in URL
    keys = ['simulationType', 'simulationId', 'modelName', 'animationArgs', 'frameIndex', 'startTime']
    data = dict(zip(keys, frame_id.split('*')))
    template = sirepo.template.import_module(data)
    data['report'] = template.get_animation_name(data)
    run_dir = simulation_db.simulation_run_dir(data)
    model_data = simulation_db.read_json(run_dir.join(template_common.INPUT_BASE_NAME))
    if feature_config.cfg.runner_daemon:
        # XX TODO: it would be better if the frontend passed the jhash to this
        # call. Since it doesn't, we have to read it out of the run_dir, which
        # creates a race condition -- we might return a frame from a different
        # version of the report than the one the frontend expects.
        jhash = template_common.report_parameters_hash(model_data)
        frame = runner_client.run_extract_job(
            run_dir, jhash, 'get_simulation_frame', data,
        )
    else:
        frame = template.get_simulation_frame(run_dir, data, model_data)
    resp = http_reply.gen_json(frame)
    if 'error' not in frame and template.WANT_BROWSER_FRAME_CACHE:
        now = datetime.datetime.utcnow()
        expires = now + datetime.timedelta(365)
        resp.headers['Cache-Control'] = 'public, max-age=31536000'
        resp.headers['Expires'] = expires.strftime("%a, %d %b %Y %H:%M:%S GMT")
        resp.headers['Last-Modified'] = now.strftime("%a, %d %b %Y %H:%M:%S GMT")
    else:
        _no_cache(resp)
    return resp
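# A minimal, runnable sketch (hypothetical field values) of the frame_id
# round trip used by api_simulationFrame above: the client joins the six
# fields with '*' in this fixed order, and the server zips them back into
# a dict.
_FRAME_ID_KEYS = ['simulationType', 'simulationId', 'modelName', 'animationArgs', 'frameIndex', 'startTime']

def _example_frame_id_round_trip():
    fields = ['srw', 'abc123', 'multiElectronAnimation', '', '0', '1500000000']
    frame_id = '*'.join(fields)
    # recovers the same dict that api_simulationFrame builds
    return dict(zip(_FRAME_ID_KEYS, frame_id.split('*')))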
def api_simulationFrame(frame_id):
    #TODO(robnagler) startTime is reportParametersHash; need version on URL and/or param names in URL
    keys = [
        'simulationType', 'simulationId', 'modelName', 'animationArgs',
        'frameIndex', 'startTime',
    ]
    data = dict(zip(keys, frame_id.split('*')))
    template = sirepo.template.import_module(data)
    data['report'] = template.get_animation_name(data)
    run_dir = simulation_db.simulation_run_dir(data)
    model_data = simulation_db.read_json(
        run_dir.join(template_common.INPUT_BASE_NAME))
    frame = template.get_simulation_frame(run_dir, data, model_data)
    response = _json_response(frame)
    if 'error' not in frame and template.WANT_BROWSER_FRAME_CACHE:
        now = datetime.datetime.utcnow()
        expires = now + datetime.timedelta(365)
        response.headers['Cache-Control'] = 'public, max-age=31536000'
        response.headers['Expires'] = expires.strftime(
            "%a, %d %b %Y %H:%M:%S GMT")
        response.headers['Last-Modified'] = now.strftime(
            "%a, %d %b %Y %H:%M:%S GMT")
    else:
        _no_cache(response)
    return response
def compute_field_range(args, compute_range):
    """ Computes the fieldRange values for all parameters across all animation files.
    Caches the value on the animation input file. compute_range() is called to read
    the simulation specific datafiles and extract the ranges by field.
    """
    from sirepo import simulation_db

    run_dir = simulation_db.simulation_run_dir({
        'simulationType': args['simulationType'],
        'simulationId': args['simulationId'],
        'report': 'animation',
    })
    data = simulation_db.read_json(run_dir.join(INPUT_BASE_NAME))
    res = None
    model_name = args['modelName']
    if model_name in data.models:
        if 'fieldRange' in data.models[model_name]:
            res = data.models[model_name].fieldRange
        else:
            res = compute_range(run_dir, data)
            data.models[model_name].fieldRange = res
            simulation_db.write_json(run_dir.join(INPUT_BASE_NAME), data)
    return {
        'fieldRange': res,
    }
def app_run_cancel():
    data = _json_input()
    data['models']['simulationStatus']['state'] = 'canceled'
    simulation_type = data['simulationType']
    simulation_db.save_simulation_json(simulation_type, data)
    cfg.job_queue.kill(simulation_db.parse_sid(data))
    # the last frame file may not be finished, remove it
    t = sirepo.template.import_module(simulation_type)
    t.remove_last_frame(simulation_db.simulation_run_dir(data))
    return '{}'
def app_run():
    data = _json_input()
    sid = simulation_db.parse_sid(data)
    err = _start_simulation(data).run_and_read()
    run_dir = simulation_db.simulation_run_dir(data)
    if err:
        pkdp('error: sid={}, dir={}, out={}', sid, run_dir, err)
        return flask.jsonify({
            'error': _error_text(err),
            'simulationId': sid,
        })
    return pkio.read_text(run_dir.join('out{}'.format(simulation_db.JSON_SUFFIX)))
def get_application_data(data):
    if data['method'] == 'calculate_bunch_parameters':
        return _calc_bunch_parameters(data['bunch'])
    if data['method'] == 'compute_particle_ranges':
        run_dir = simulation_db.simulation_run_dir({
            'simulationType': SIM_TYPE,
            'simulationId': data['simulationId'],
            'report': 'animation',
        })
        return {
            'fieldRange': _compute_range_across_files(run_dir),
        }
    assert False, 'unknown application data method: {}'.format(data['method'])
def app_run():
    data = _json_input()
    sid = simulation_db.parse_sid(data)
    err = _start_simulation(data).run_and_read()
    run_dir = simulation_db.simulation_run_dir(data)
    if err:
        pkdp('error: sid={}, dir={}, out={}', sid, run_dir, err)
        return flask.jsonify({
            'error': _error_text(err),
            'simulationId': sid,
        })
    return pkio.read_text(
        run_dir.join('out{}'.format(simulation_db.JSON_SUFFIX)))
def _reqd(req):
    """Read the run_dir and return cached_data.

    Only a hit if the models between data and cache match exactly.
    Otherwise, return cached data if it's there and valid.

    Args:
        req (dict): parsed simulation data

    Returns:
        Dict: report parameters and hashes
    """
    res = PKDict(
        cache_hit=False,
        cached_data=None,
        cached_hash=None,
        parameters_changed=False,
        run_dir=simulation_db.simulation_run_dir(req.req_data),
        sim_data=req.sim_data,
    )
    res.pkupdate(
        input_file=simulation_db.json_filename(
            template_common.INPUT_BASE_NAME,
            res.run_dir,
        ),
        is_parallel=res.sim_data.is_parallel(req.req_data),
        jid=res.sim_data.parse_jid(req.req_data),
        job_status=_read_status(res.run_dir),
        model_name=res.sim_data.parse_model(req.req_data.report),
        req_hash=(
            req.req_data.get('computeJobHash')
            or res.sim_data.compute_job_hash(req.req_data)
        ),
    )
    if not res.run_dir.check():
        return res
    try:
        c = simulation_db.read_json(res.input_file)
    except Exception as e:
        if pykern.pkio.exception_is_not_found(e):
            return res
        raise
    res.cached_data = c
    # backwards compatibility for old runs that don't have computeJobCacheKey
    res.cached_hash = c.models.pksetdefault(
        computeJobCacheKey=lambda: PKDict(
            computeJobHash=res.sim_data.compute_job_hash(c),
            computeJobSerial=int(res.input_file.mtime()),
        ),
    ).computeJobCacheKey.computeJobHash
    if res.req_hash == res.cached_hash:
        res.cache_hit = True
        return res
    res.parameters_changed = True
    return res
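# An assumed caller-side sketch for _reqd above (the real call sites live in
# the job-handling code): the three flags partition the possible outcomes.
# r = _reqd(req)
# if r.cache_hit:
#     pass  # run_dir results match the request's computeJobHash; reuse them
# elif r.parameters_changed:
#     pass  # run_dir holds results for a different hash; a re-run is needed
# else:
#     pass  # no cached input at all (missing run_dir or input file)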
def app_download_data_file(simulation_type, simulation_id, model, frame):
    data = {
        'simulationType': simulation_type,
        'simulationId': simulation_id,
        'modelName': model,
    }
    frame = int(frame)
    template = sirepo.template.import_module(data)
    if frame >= 0:
        data['report'] = template.get_animation_name(data)
    else:
        data['report'] = model
    run_dir = simulation_db.simulation_run_dir(data)
    filename, content, content_type = template.get_data_file(run_dir, model, frame)
    return _as_attachment(flask.make_response(content), content_type, filename)
def sim_frame_dispatch(frame_args):
    from sirepo import simulation_db

    frame_args.pksetdefault(
        run_dir=lambda: simulation_db.simulation_run_dir(frame_args),
    ).pksetdefault(
        sim_in=lambda: simulation_db.read_json(
            frame_args.run_dir.join(INPUT_BASE_NAME),
        ),
    )
    t = sirepo.template.import_module(frame_args.simulationType)
    o = getattr(t, 'sim_frame_' + frame_args.frameReport, None) \
        or getattr(t, 'sim_frame')
    res = o(frame_args)
    if res is None:
        raise RuntimeError('unsupported simulation_frame model={}'.format(
            frame_args.frameReport))
    return res
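# Sketch of the per-template hook that sim_frame_dispatch above resolves
# (report name and body are hypothetical): a template module defines either
# a per-report function named sim_frame_<frameReport> or a catch-all
# sim_frame.
def sim_frame_beamAnimation(frame_args):
    # hypothetical: read frame_args.sim_in / files under frame_args.run_dir
    # and build the plot for frame_args.frameIndex
    return {'title': 'Beam animation', 'points': []}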
def app_simulation_frame(frame_id):
    keys = ['simulationType', 'simulationId', 'modelName', 'animationArgs', 'frameIndex', 'startTime']
    data = dict(zip(keys, frame_id.split('-')))
    run_dir = simulation_db.simulation_run_dir(data)
    template = sirepo.template.import_module(data['simulationType'])
    response = flask.jsonify(template.get_simulation_frame(run_dir, data))
    if template.WANT_BROWSER_FRAME_CACHE:
        now = datetime.datetime.utcnow()
        expires = now + datetime.timedelta(365)
        response.headers['Cache-Control'] = 'public, max-age=31536000'
        response.headers['Expires'] = expires.strftime("%a, %d %b %Y %H:%M:%S GMT")
        response.headers['Last-Modified'] = now.strftime("%a, %d %b %Y %H:%M:%S GMT")
    else:
        _no_cache(response)
    return response
def api_downloadDataFile(simulation_type, simulation_id, model, frame, suffix=None):
    data = {
        'simulationType': sirepo.template.assert_sim_type(simulation_type),
        'simulationId': simulation_id,
        'modelName': model,
    }
    options = pkcollections.Dict(data)
    options.suffix = suffix
    frame = int(frame)
    template = sirepo.template.import_module(data)
    if frame >= 0:
        data['report'] = template.get_animation_name(data)
    else:
        data['report'] = model
    run_dir = simulation_db.simulation_run_dir(data)
    filename, content, content_type = template.get_data_file(run_dir, model, frame, options=options)
    return _as_attachment(flask.make_response(content), content_type, filename)
def app_download_data_file(simulation_type, simulation_id, model_or_frame):
    data = {
        'simulationType': simulation_type,
        'simulationId': simulation_id,
    }
    frame_index = -1
    if re.match(r'^\d+$', model_or_frame):
        frame_index = int(model_or_frame)
    else:
        data['report'] = model_or_frame
    run_dir = simulation_db.simulation_run_dir(data)
    template = sirepo.template.import_module(simulation_type)
    filename, content, content_type = template.get_data_file(run_dir, frame_index)
    response = flask.make_response(content)
    response.mimetype = content_type
    response.headers['Content-Disposition'] = 'attachment; filename="{}"'.format(filename)
    return response
def _start_simulation(data, run_async=False):
    """Setup and start the simulation.

    Args:
        data (dict): app data
        run_async (bool): run-background or run

    Returns:
        object: _Command or daemon instance
    """
    run_dir = simulation_db.simulation_run_dir(data, remove_dir=True)
    pkio.mkdir_parent(run_dir)
    #TODO(robnagler) create a lock_dir -- what node/pid/thread to use?
    #   probably can only do with celery.
    simulation_type = data['simulationType']
    sid = simulation_db.parse_sid(data)
    data = simulation_db.fixup_old_data(simulation_type, data)
    assert simulation_type in simulation_db.APP_NAMES, \
        '{}: invalid simulation type'.format(simulation_type)
    template = sirepo.template.import_module(simulation_type)
    for d in simulation_db.simulation_dir(simulation_type, sid), simulation_db.simulation_lib_dir(simulation_type):
        for f in glob.glob(str(d.join('*.*'))):
            if os.path.isfile(f):
                py.path.local(f).copy(run_dir)
    template.prepare_aux_files(run_dir, data)
    simulation_db.save_simulation_json(simulation_type, data)
    with open(str(run_dir.join('in{}'.format(simulation_db.JSON_SUFFIX))), 'w') as outfile:
        json.dump(data, outfile)
    pkio.write_text(
        run_dir.join(simulation_type + '_parameters.py'),
        template.generate_parameters_file(
            data,
            _schema_cache(simulation_type),
            run_dir=run_dir,
            run_async=run_async,
        ))
    cmd = [_ROOT_CMD, simulation_type] \
        + ['run-background' if run_async else 'run'] + [str(run_dir)]
    if run_async:
        return cfg.job_queue(sid, run_dir, cmd)
    return _Command(cmd, cfg.foreground_time_limit)
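# For illustration (values hypothetical): with simulation_type='srw' and
# run_async=True, the command assembled above is
#     [_ROOT_CMD, 'srw', 'run-background', str(run_dir)]
# which cfg.job_queue executes in the background, while run_async=False wraps
# it in a _Command bounded by cfg.foreground_time_limit.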
def app_simulation_frame(frame_id):
    #TODO(robnagler) startTime is reportParametersHash; need version on URL and/or param names in URL
    keys = ['simulationType', 'simulationId', 'modelName', 'animationArgs', 'frameIndex', 'startTime']
    data = dict(zip(keys, frame_id.split('*')))
    template = sirepo.template.import_module(data)
    data['report'] = template.get_animation_name(data)
    run_dir = simulation_db.simulation_run_dir(data)
    model_data = simulation_db.read_json(run_dir.join(template_common.INPUT_BASE_NAME))
    response = _json_response(template.get_simulation_frame(run_dir, data, model_data))
    if template.WANT_BROWSER_FRAME_CACHE:
        now = datetime.datetime.utcnow()
        expires = now + datetime.timedelta(365)
        response.headers['Cache-Control'] = 'public, max-age=31536000'
        response.headers['Expires'] = expires.strftime("%a, %d %b %Y %H:%M:%S GMT")
        response.headers['Last-Modified'] = now.strftime("%a, %d %b %Y %H:%M:%S GMT")
    else:
        _no_cache(response)
    return response
def api_downloadDataFile(simulation_type, simulation_id, model, frame, suffix=None):
    #TODO(robnagler) validate suffix and frame
    req = http_request.parse_params(
        id=simulation_id,
        model=model,
        type=simulation_type,
        check_sim_exists=True,
    )
    f, c, t = sirepo.template.import_module(req.type).get_data_file(
        simulation_db.simulation_run_dir(req.req_data),
        req.model,
        int(frame),
        # copy so the parsed request is not mutated; pkupdate returns the
        # dict itself (plain dict.update returns None)
        options=req.req_data.copy().pkupdate(suffix=suffix),
    )
    return http_reply.gen_file_as_attachment(c, f, t)
def app_download_data_file(simulation_type, simulation_id, model_or_frame):
    data = {
        'simulationType': simulation_type,
        'simulationId': simulation_id,
    }
    frame_index = -1
    if re.match(r'^\d+$', model_or_frame):
        frame_index = int(model_or_frame)
    else:
        data['report'] = model_or_frame
    run_dir = simulation_db.simulation_run_dir(data)
    template = sirepo.template.import_module(simulation_type)
    filename, content, content_type = template.get_data_file(
        run_dir, frame_index)
    response = flask.make_response(content)
    response.mimetype = content_type
    response.headers[
        'Content-Disposition'] = 'attachment; filename="{}"'.format(filename)
    return response
def _start_simulation(data, run_async=False):
    """Setup and start the simulation.

    Args:
        data (dict): app data
        run_async (bool): run-background or run

    Returns:
        object: _Command or daemon instance
    """
    run_dir = simulation_db.simulation_run_dir(data, remove_dir=True)
    pkio.mkdir_parent(run_dir)
    #TODO(robnagler) create a lock_dir -- what node/pid/thread to use?
    #   probably can only do with celery.
    simulation_type = data['simulationType']
    sid = simulation_db.parse_sid(data)
    data = simulation_db.fixup_old_data(simulation_type, data)
    assert simulation_type in simulation_db.APP_NAMES, \
        '{}: invalid simulation type'.format(simulation_type)
    template = sirepo.template.import_module(simulation_type)
    simulation_db.save_simulation_json(simulation_type, data)
    for d in simulation_db.simulation_dir(simulation_type, sid), simulation_db.simulation_lib_dir(simulation_type):
        for f in glob.glob(str(d.join('*.*'))):
            if os.path.isfile(f):
                py.path.local(f).copy(run_dir)
    with open(str(run_dir.join('in{}'.format(simulation_db.JSON_SUFFIX))), 'w') as outfile:
        json.dump(data, outfile)
    pkio.write_text(
        run_dir.join(simulation_type + '_parameters.py'),
        template.generate_parameters_file(
            data,
            _schema_cache(simulation_type),
            run_dir=run_dir,
            run_async=run_async,
        )
    )
    cmd = [_ROOT_CMD, simulation_type] \
        + ['run-background' if run_async else 'run'] + [str(run_dir)]
    if run_async:
        return cfg.job_queue(sid, run_dir, cmd)
    return _Command(cmd, cfg.foreground_time_limit)
def api_runCancel():
    data = _parse_data_input()
    jid = simulation_db.job_id(data)
    # TODO(robnagler) need to have a way of listing jobs
    # Don't bother with cache_hit check. We don't have any way of canceling
    # if the parameters don't match so for now, always kill.
    #TODO(robnagler) mutex required
    if cfg.job_queue.is_processing(jid):
        run_dir = simulation_db.simulation_run_dir(data)
        # Write first, since results are write once, and we want to
        # indicate the cancel instead of the termination error that
        # will happen as a result of the kill.
        simulation_db.write_result({'state': 'canceled'}, run_dir=run_dir)
        cfg.job_queue.kill(jid)
        # TODO(robnagler) should really be inside the template (t.cancel_simulation()?)
        # the last frame file may not be finished, remove it
        t = sirepo.template.import_module(data)
        t.remove_last_frame(run_dir)
    # Always true from the client's perspective
    return _json_response({'state': 'canceled'})
def app_run_cancel():
    data = _parse_data_input()
    jid = simulation_db.job_id(data)
    # TODO(robnagler) need to have a way of listing jobs
    # Don't bother with cache_hit check. We don't have any way of canceling
    # if the parameters don't match so for now, always kill.
    #TODO(robnagler) mutex required
    if cfg.job_queue.is_processing(jid):
        run_dir = simulation_db.simulation_run_dir(data)
        # Write first, since results are write once, and we want to
        # indicate the cancel instead of the termination error that
        # will happen as a result of the kill.
        simulation_db.write_result({'state': 'canceled'}, run_dir=run_dir)
        cfg.job_queue.kill(jid)
        # TODO(robnagler) should really be inside the template (t.cancel_simulation()?)
        # the last frame file may not be finished, remove it
        t = sirepo.template.import_module(data)
        t.remove_last_frame(run_dir)
    # Always true from the client's perspective
    return _json_response({'state': 'canceled'})
def sim_frame_dispatch(frame_args):
    from sirepo import simulation_db

    frame_args.pksetdefault(
        run_dir=lambda: simulation_db.simulation_run_dir(frame_args),
    ).pksetdefault(
        sim_in=lambda: simulation_db.read_json(
            frame_args.run_dir.join(INPUT_BASE_NAME),
        ),
    )
    t = sirepo.template.import_module(frame_args.simulationType)
    o = getattr(t, 'sim_frame', None) \
        or getattr(t, 'sim_frame_' + frame_args.frameReport)
    try:
        res = o(frame_args)
    except Exception as e:
        pkdlog('error generating report frame_args={} stack={}', frame_args, pkdexc())
        raise sirepo.util.convert_exception(e, display_text='Report not generated')
    if res is None:
        raise RuntimeError('unsupported simulation_frame model={}'.format(frame_args.frameReport))
    return res
def app_simulation_frame(frame_id):
    keys = [
        'simulationType', 'simulationId', 'modelName', 'animationArgs',
        'frameIndex', 'startTime',
    ]
    data = dict(zip(keys, frame_id.split('-')))
    run_dir = simulation_db.simulation_run_dir(data)
    template = sirepo.template.import_module(data['simulationType'])
    response = flask.jsonify(template.get_simulation_frame(run_dir, data))
    if template.WANT_BROWSER_FRAME_CACHE:
        now = datetime.datetime.utcnow()
        expires = now + datetime.timedelta(365)
        response.headers['Cache-Control'] = 'public, max-age=31536000'
        response.headers['Expires'] = expires.strftime(
            "%a, %d %b %Y %H:%M:%S GMT")
        response.headers['Last-Modified'] = now.strftime(
            "%a, %d %b %Y %H:%M:%S GMT")
    else:
        _no_cache(response)
    return response
def api_runSimulation():
    from pykern import pkjson
    data = _parse_data_input(validate=True)
    # if flag is set
    # - check status
    # - if status is bad, rewrite the run dir (XX race condition, to fix later)
    # - then request it be started
    if feature_config.cfg.runner_daemon:
        jhash = template_common.report_parameters_hash(data)
        run_dir = simulation_db.simulation_run_dir(data)
        status = runner_client.report_job_status(run_dir, jhash)
        already_good_status = [
            runner_client.JobStatus.RUNNING,
            runner_client.JobStatus.COMPLETED,
        ]
        if status not in already_good_status:
            data['simulationStatus'] = {
                'startTime': int(time.time()),
                'state': 'pending',
            }
            tmp_dir = run_dir + '-' + jhash + '-' + uuid.uuid4() + srdb.TMP_DIR_SUFFIX
            cmd, _ = simulation_db.prepare_simulation(data, tmp_dir=tmp_dir)
            runner_client.start_report_job(run_dir, jhash, cfg.backend, cmd, tmp_dir)
        res = _simulation_run_status_runner_daemon(data, quiet=True)
        return http_reply.gen_json(res)
    else:
        res = _simulation_run_status(data, quiet=True)
        if ((not res['state'] in _RUN_STATES
             and (res['state'] != 'completed' or data.get('forceRun', False)))
                or res.get('parametersChanged', True)):
            try:
                _start_simulation(data)
            except runner.Collision:
                pkdlog('{}: runner.Collision, ignoring start', simulation_db.job_id(data))
            res = _simulation_run_status(data)
        return http_reply.gen_json(res)
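# Hypothetical illustration of the staging path built above: for
# run_dir=.../animation and jhash='d41d8cd9...', tmp_dir becomes
#     .../animation-d41d8cd9...-<uuid4><srdb.TMP_DIR_SUFFIX>
# so concurrent starts of the same report do not collide on one directory.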
def app_run_status():
    data = _json_input()
    sid = simulation_db.parse_sid(data)
    simulation_type = data['simulationType']
    template = sirepo.template.import_module(simulation_type)
    run_dir = simulation_db.simulation_run_dir(data)
    if cfg.job_queue.is_running(sid):
        completion = template.background_percent_complete(data, run_dir, True)
        state = 'running'
    else:
        data = simulation_db.open_json_file(simulation_type, sid=sid)
        state = data['models']['simulationStatus']['state']
        completion = template.background_percent_complete(data, run_dir, False)
        if state == 'running':
            if completion['frame_count'] == completion['total_frames']:
                state = 'completed'
            else:
                state = 'canceled'
            data['models']['simulationStatus']['state'] = state
            simulation_db.save_simulation_json(data['simulationType'], data)
    frame_id = ''
    elapsed_time = ''
    if 'last_update_time' in completion:
        frame_id = completion['last_update_time']
        elapsed_time = int(frame_id) - int(
            data['models']['simulationStatus']['startTime'])
    return flask.jsonify({
        'state': state,
        'percentComplete': completion['percent_complete'],
        'frameCount': completion['frame_count'],
        'totalFrames': completion['total_frames'],
        'frameId': frame_id,
        'elapsedTime': elapsed_time,
    })
def app_run_status():
    data = _json_input()
    sid = simulation_db.parse_sid(data)
    simulation_type = data['simulationType']
    template = sirepo.template.import_module(simulation_type)
    run_dir = simulation_db.simulation_run_dir(data)
    if cfg.job_queue.is_running(sid):
        completion = template.background_percent_complete(data, run_dir, True)
        state = 'running'
    else:
        data = simulation_db.open_json_file(simulation_type, sid=sid)
        state = data['models']['simulationStatus']['state']
        completion = template.background_percent_complete(data, run_dir, False)
        if state == 'running':
            if completion['frame_count'] == completion['total_frames']:
                state = 'completed'
            else:
                state = 'canceled'
            data['models']['simulationStatus']['state'] = state
            simulation_db.save_simulation_json(data['simulationType'], data)
    frame_id = ''
    elapsed_time = ''
    if 'last_update_time' in completion:
        frame_id = completion['last_update_time']
        elapsed_time = int(frame_id) - int(data['models']['simulationStatus']['startTime'])
    return flask.jsonify({
        'state': state,
        'percentComplete': completion['percent_complete'],
        'frameCount': completion['frame_count'],
        'totalFrames': completion['total_frames'],
        'frameId': frame_id,
        'elapsedTime': elapsed_time,
    })
def _simulation_run_status_runner_daemon(data, quiet=False):
    """Look for simulation status and output

    Args:
        data (dict): request
        quiet (bool): don't write errors to log

    Returns:
        dict: status response
    """
    try:
        run_dir = simulation_db.simulation_run_dir(data)
        jhash = template_common.report_parameters_hash(data)
        status = runner_client.report_job_status(run_dir, jhash)
        is_running = status is runner_client.JobStatus.RUNNING
        rep = simulation_db.report_info(data)
        res = {'state': status.value}
        if not is_running:
            if status is not runner_client.JobStatus.MISSING:
                res, err = runner_client.run_extract_job(
                    run_dir, jhash, 'result', data,
                )
                if err:
                    return _simulation_error(err, 'error in read_result', run_dir)
        if simulation_db.is_parallel(data):
            new = runner_client.run_extract_job(
                run_dir,
                jhash,
                'background_percent_complete',
                is_running,
            )
            new.setdefault('percentComplete', 0.0)
            new.setdefault('frameCount', 0)
            res.update(new)
        res['parametersChanged'] = rep.parameters_changed
        if res['parametersChanged']:
            pkdlog(
                '{}: parametersChanged=True req_hash={} cached_hash={}',
                rep.job_id,
                rep.req_hash,
                rep.cached_hash,
            )
        #TODO(robnagler) verify serial number to see what's newer
        res.setdefault('startTime', _mtime_or_now(rep.input_file))
        res.setdefault('lastUpdateTime', _mtime_or_now(rep.run_dir))
        res.setdefault('elapsedTime', res['lastUpdateTime'] - res['startTime'])
        if is_running:
            res['nextRequestSeconds'] = simulation_db.poll_seconds(rep.cached_data)
            res['nextRequest'] = {
                'report': rep.model_name,
                'reportParametersHash': rep.cached_hash,
                'simulationId': rep.cached_data['simulationId'],
                'simulationType': rep.cached_data['simulationType'],
            }
        pkdc(
            '{}: processing={} state={} cache_hit={} cached_hash={} data_hash={}',
            rep.job_id,
            is_running,
            res['state'],
            rep.cache_hit,
            rep.cached_hash,
            rep.req_hash,
        )
    except Exception:
        return _simulation_error(pkdexc(), quiet=quiet)
    return res
def app_clear_frames():
    """Clear animation frames for the simulation."""
    data = _json_input()
    simulation_db.simulation_run_dir(data, remove_dir=True)
    return '{}'