from argparse import ArgumentParser


def main():
    parser = ArgumentParser(prog='st nuke')
    parser.add_argument(
        '--session_id', '-s', help='The session identifier.', required=True)
    args = parser.parse_args()
    store = Store(args.session_id)
    store.nuke()
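# Usage sketch (assumption: the module is exposed as the `st nuke` console
# script; the guard below just makes it directly runnable as well):
#
#   st nuke --session_id demo-session
#
if __name__ == '__main__':
    main()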
def get(self, session_id):
    s = Store(session_id)
    check_auth(s, 'cases')
    return {
        'cases': {
            name: cases.to_list_of_dicts(('diffs', 'errors', 'actual'))
            for name, cases in s.cases_getall().items()
        }
    }
from argparse import ArgumentParser


def main():
    parser = ArgumentParser(prog='st auth')
    parser.add_argument(
        '--session_id', '-s', help='The session identifier.', required=True)
    parser.add_argument('realms', help='The realms to authorize.', nargs='+')
    args = parser.parse_args()
    store = Store(args.session_id)
    print(store.sessions_dumps(args.realms))
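# Usage sketch (assumption: sessions_dumps() returns a token, bound to the
# session secret, that check_auth() in the HTTP resources verifies; the realm
# names mirror the ones those resources guard, e.g. 'cases' and 'texts'):
#
#   st auth --session_id demo-session cases texts summaries results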
from argparse import ArgumentParser


def main():
    parser = ArgumentParser(prog='st logs')
    parser.add_argument(
        '--follow', '-f', action='store_true',
        help='Whether to keep waiting for new logs.')
    args = parser.parse_args()
    # Drain the stored log; with --follow, keep waiting for new entries.
    while True:
        record = Store.get_logentry(args.follow)
        if not record:
            break
        print(record)
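# Usage sketch (assumption: Store.get_logentry(False) returns a falsy value
# once the backlog is drained, so without -f the loop above prints what is
# stored and exits, while with -f it blocks waiting for new records):
#
#   st logs --follow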
def pubsub_forwarder():
    # app, socketio, Store, loads and PUBSUB_SLEEP_SECONDS come from the
    # enclosing module.
    import redis
    app.logger.info('Started pub/sub forwarder')
    r = redis.Redis.from_url('redis://{}'.format(
        environ.get('SCYTHE_REDIS_HOST', 'localhost')), decode_responses=True)
    p = r.pubsub()
    p.subscribe('summaries_channel')
    while True:
        # Piggyback the current job-queue load on every iteration.
        socketio.emit('load_message', {'load': Store.jobs_num()})
        message = p.get_message()
        if message and message['type'] == 'message' and message[
                'channel'] == 'summaries_channel':
            socketio.emit('summary_message',
                          {'summary': loads(message['data'])})
            app.logger.info('Forwarded a new result to websocket')
            # A summary was forwarded: drain pending messages without sleeping.
            continue
        socketio.sleep(PUBSUB_SLEEP_SECONDS)
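# A minimal publisher-side sketch (assumption: producers publish each new
# summary as JSON on the same 'summaries_channel' the forwarder subscribes
# to, since the forwarder loads() the payload; the fields are hypothetical):
import json
from os import environ

import redis

r = redis.Redis.from_url(
    'redis://{}'.format(environ.get('SCYTHE_REDIS_HOST', 'localhost')))
r.publish('summaries_channel', json.dumps({'uid': 'u123', 'exercise': 'ex0'}))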
def add(path, session_id):
    # The session configuration is a Python file; exec() populates config
    # with its top-level names (SECRET_KEY, REGISTERED_UIDS, TAR_DATA).
    config = {}
    with open(path, 'r') as f:
        exec(f.read(), config)
    LOGGER.info('Read session {} configuration'.format(session_id))
    store = Store(session_id)
    n = store.uids_addall(config['REGISTERED_UIDS'].items())
    LOGGER.info('Imported {} uid(s)'.format(n))
    # Unpack the base64-encoded tar of exercises into a temporary directory.
    temp_dir = tar2tmpdir(decodebytes(config['TAR_DATA'].encode('utf-8')))
    for exercise_path in glob(join(temp_dir, '*')):
        exercise_name = basename(exercise_path)
        exercise_cases = TestCases(exercise_path)
        if len(exercise_cases) == 0:
            LOGGER.warning('Missing cases for {}'.format(exercise_name))
        else:
            n = store.cases_add(exercise_name, exercise_cases)
            LOGGER.info('Imported {} case(s) for exercise {}'.format(
                n, exercise_name))
        list_of_texts = []
        for text_path in glob(join(exercise_path, TEXTS_GLOB)):
            text_name = splitext(basename(text_path))[0]
            with io.open(text_path, 'r', encoding=DEFAULT_ENCODING) as tf:
                text = tf.read()
            list_of_texts.append({'name': text_name, 'content': text})
        if not list_of_texts:
            LOGGER.warning('Missing texts for {}'.format(exercise_name))
        else:
            n = store.texts_add(exercise_name, list_of_texts)
            LOGGER.info('Imported {} text(s) for exercise {}'.format(
                n, exercise_name))
    store.sessions_add(config['SECRET_KEY'])
    rmrotree(temp_dir)
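# A sketch of the session configuration file that add() exec()s; the keys are
# grounded in the reads above, while the concrete values are placeholders
# (assumption: TAR_DATA is the base64 text of a tar archive with one
# top-level directory per exercise, as the glob over temp_dir suggests):
SECRET_KEY = 'replace-with-a-random-secret'
REGISTERED_UIDS = {'u123': 'Ada Lovelace', 'u456': 'Alan Turing'}
TAR_DATA = """\
...base64-encoded tar of the exercises tree...
"""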
def get(self, session_id):
    s = Store(session_id)
    check_auth(s, 'texts')
    return {'texts': s.texts_getall()}
def get(self, session_id):
    s = Store(session_id)
    check_auth(s, 'summaries')
    return {'summaries': s.summaries_getall()}
def get(self, session_id):
    s = Store(session_id)
    return {'exercises': s.texts_exercises()}
def get(self, session_id):
    s = Store(session_id)
    return {'uids': s.uids_getall()}
def get(self):
    return {'sessions': Store.get_sessions()}
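# A minimal wiring sketch (assumption: each get() above is a method of a
# flask_restful.Resource subclass; the class name and route below are
# hypothetical, and Store's import path is elided in these excerpts):
from flask import Flask
from flask_restful import Api, Resource

app = Flask(__name__)
api = Api(app)


class Sessions(Resource):
    def get(self):
        return {'sessions': Store.get_sessions()}


api.add_resource(Sessions, '/sessions')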
def get(self, session_id, uid, timestamp, exercise):
    s = Store(session_id)
    check_auth(s, 'results')
    s.set_harvest(uid, timestamp)
    return {'results': s.results_get(exercise)}
def get(self, session_id, uid, timestamp, exercise):
    s = Store(session_id)
    check_auth(s, 'compilations')
    s.set_harvest(uid, timestamp)
    return {'compilations': s.compilations_get(exercise)}
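# A hypothetical client call (assumption: the resource above is routed with
# the URL parts in the order of its get() signature; the path and port are
# illustrative only, not confirmed by these excerpts):
import requests

r = requests.get('http://localhost:5000/compilations/{}/{}/{}/{}'.format(
    'demo-session', 'u123', '1514764800', 'ex0'))
print(r.json()['compilations'])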