def agent_details(agent_id):
    """Return a JSON description of one agent.

    Responds with the agent's nick, state, last-seen timestamp, labels,
    and a summary of its 20 most recent sessions. Aborts with 404 when
    the agent id is unknown.
    """
    info = g.db.hgetall(jdb.KEY_AGENT % agent_id)
    if not info:
        abort(404)

    recent = []
    # Last 20 session ids, newest first, from the agent's history list.
    for session_id in g.db.lrange(jdb.KEY_AGENT_HISTORY % agent_id, 0, 19):
        # Session ids are "<build-uuid>-<n>"; the uuid prefix locates the build.
        build = Build.load(session_id.split('-')[0])
        session = get_session(g.db, session_id)
        recent.append({
            'session_id': session_id,
            'build_id': build.build_id,
            'job_name': build.job_name,
            'number': build.number,
            'started': session['started'],
            'ended': session['ended'],
            'result': session['result'],
            'title': get_session_title(session),
        })

    return jsonify(id=agent_id,
                   nick=info.get('nick', ''),
                   state=info["state"],
                   seen=int(info["seen"]),
                   labels=info["labels"].split(","),
                   history=recent)
def get_session_info(session_id):
    """Return the JSON payload an agent needs to run a session.

    Includes the recipe contents, effective parameters (build parameters
    with static job defaults filled in), and the shared-storage token/URL.
    """
    session = get_session(g.db, session_id)
    # Session ids are "<build-uuid>-<n>"; the uuid prefix locates the build.
    build_uuid = session_id.split('-')[0]
    build = Build.load(build_uuid)
    recipe = Recipe.load(build.recipe, build.recipe_ref)
    job = Job.load(build.job_name, build.job_ref)

    # Calculate the actual parameters - setting defaults if static value.
    # (parameters that have a function as default value will have them
    # called just before starting the job)
    param_def = job.get_merged_params()
    # Copy so filling in defaults never mutates the Build object's own
    # parameter dict (Build.load could hand back a shared/cached instance).
    parameters = dict(build.parameters)
    for name, param in param_def.items():
        if 'default' in param and name not in parameters:
            parameters[name] = param['default']

    return jsonify(run_info=session['run_info'] or {},
                   build_uuid=build_uuid,
                   build_name="%s-%d" % (build.job_name, build.number),
                   recipe=recipe.contents,
                   ss_token=build.ss_token,
                   ss_url=current_app.config['SS_URL'],
                   parameters=parameters)
def get_session_result(session_id):
    """Return the result of a finished session, or its current state.

    Aborts with 404 when the session id is unknown. (The original wrapped
    this body in a `while True:` loop that could never iterate twice —
    every path returned on the first pass — so the loop has been removed;
    behavior is unchanged.)
    """
    info = get_session(g.db, session_id)
    if not info:
        abort(404, "Session ID not found")
    if info['state'] == SESSION_STATE_DONE:
        return jsonify(result=info['result'], output=info['output'])
    return jsonify(state=info['state'])
def get_build2(job_name, number):
    """Return full JSON details for build *number* of *job_name*.

    Includes the build record, its session log, and per-session info
    (with each agent's nick resolved). Aborts with 404 when the build
    number or the stored build id is invalid.
    """
    number = int(number)
    # Build numbers are 1-based; the redis list of build uuids is 0-based.
    build_uuid = g.db.lindex(KEY_JOB_BUILDS % job_name, number - 1)
    if build_uuid is None:
        abort(404, 'Not Found')
    build = Build.load(build_uuid)
    if not build:
        abort(404, 'Invalid Build ID')

    # NOTE(review): lrange(0, 1000) is inclusive and returns up to 1001
    # entries — confirm whether the intended cap was 1000.
    log = [json.loads(entry)
           for entry in g.db.lrange(KEY_SLOG % build_uuid, 0, 1000)]

    # Fetch information about all sessions
    sessions = []
    for i in range(int(build.next_sess_id)):
        s = get_session(g.db, '%s-%d' % (build_uuid, i))
        sessions.append({
            'num': i,
            'agent_id': s['agent'],
            'agent_nick': '',  # filled in below once nicks are resolved
            'title': get_session_title(s),
            'log_file': s['log_file'],
            'parent': s['parent'],
            'state': s['state'],
            'result': s['result'],
        })

    # Fetch info about the agents (the nick name)
    agents = {s['agent_id'] for s in sessions}
    for agent_id in agents:
        nick = g.db.hget(KEY_AGENT % agent_id, 'nick')
        for s in sessions:
            if s['agent_id'] == agent_id:
                s['agent_nick'] = nick

    return jsonify(build=build.as_dict(),
                   uuid=build_uuid,
                   log=log,
                   sessions=sessions)
def perform(session_id):
    """Dispatch a session to a matching agent, or queue it.

    Atomically picks an agent from the intersection of the label sets the
    session requires and the set of available agents, using an optimistic
    WATCH/MULTI redis transaction; retries on contention.
    """
    db = jdb.conn()
    session = get_session(db, session_id)
    # Candidate agents must be members of every required label set AND
    # the "available" set; intersect them all below.
    lkeys = [jdb.KEY_LABEL % label for label in session['labels']]
    lkeys.append(jdb.KEY_AVAILABLE)
    # Queue score: current time in milliseconds.
    ts = float(time.time() * 1000)
    # Throwaway key holding the intersection result for this attempt.
    alloc_key = jdb.KEY_ALLOCATION % random_sha1()
    while True:
        with db.pipeline() as pipe:
            try:
                # Abort the transaction (WatchError) if the available set
                # changes between here and execute().
                pipe.watch(jdb.KEY_AVAILABLE)
                pipe.sinterstore(alloc_key, lkeys)
                # Pop one arbitrary matching agent (None if no match).
                agent_id = pipe.spop(alloc_key)
                if not agent_id:
                    # No eligible agent: mark the session queued and add it
                    # to the queued-sessions zset, scored by timestamp.
                    pipe.multi()
                    set_session_queued(pipe, session_id)
                    pipe.zadd(jdb.KEY_QUEUED_SESSIONS, ts, session_id)
                    pipe.delete(alloc_key)
                    pipe.execute()
                    logging.debug("No agent available - queuing")
                    # NOTE(review): control falls through here and the
                    # while-loop re-runs, re-queuing the session in a busy
                    # loop until an agent appears — looks like a missing
                    # `return` after queuing; confirm intended behavior.
                else:
                    # Try to reserve the popped agent for this session.
                    agent_info = DispatchSession.allocate(
                        pipe, agent_id, session_id)
                    pipe.delete(alloc_key)
                    pipe.execute()
                    if not agent_info:
                        # Agent was taken out from under us; retry.
                        logging.debug("Tried to allocate %s. Bummer" % agent_id)
                        continue
                    logging.debug("Dispatching to %s" % agent_id)
                    do_dispatch(db, agent_id, agent_info, session_id)
                    return
            except redis.WatchError:
                # Available set changed mid-transaction; retry allocation.
                continue