def add_excluded(data, jid):
    """Persist every excluded region of a job into the `excluded` table.

    Each region dict (keys: start, end, chain) is stored twice: once as a
    human-readable range and once as a Jmol selection string.
    """
    for region in data['excluded_regions']:
        start = region['start']
        end = region['end']
        chain = region['chain']
        readable = start + ':' + chain + ' - ' + end + ':' + chain
        jmol_sel = start + '-' + end + ':' + chain
        query_db(
            "INSERT INTO excluded(jid,excluded_region, excluded_jmol) "
            "VALUES(%s,%s,%s)",
            [jid, readable, jmol_sel], insert=True)
    return True
def user_add_constraints(data, jid):
    """Store the scaling factor and all flexible-region constraints of a job."""
    query_db(
        "UPDATE user_queue SET constraints_scaling_factor=%s WHERE jid=%s",
        [data['scaling_factor'], jid], insert=True)
    # Flexibility label -> numeric force; anything unrecognized falls back to 0
    # (same outcome as the original if/if chain: "full" -> 0, "moderate" -> 0.5).
    force_by_label = {"full": 0, "moderate": 0.5}
    for region in data['flexible_regions']:
        start = region['start']
        end = region['end']
        chain = region['chain']
        definition = start + ':' + chain + ' - ' + end + ':' + chain
        jmol_sel = start + '-' + end + ':' + chain
        power = force_by_label.get(region['flexibility'], 0)
        query_db(
            "INSERT INTO constraints(`jid`,`constraint_definition`,`force`,"
            "`constraint_jmol`) VALUES(%s,%s,%s,%s)",
            [jid, definition, power, jmol_sel], insert=True)
    return True
def job_info(job_id):
    """Return JSON details for a job: queue row plus its flexible and
    excluded regions.

    Responds with {'id': ..., 'status': 'error'} when the jid is unknown.
    """
    todel = str(app.config['DELETE_USER_JOBS_AFTER'])
    system_info = query_db("SELECT ligand_sequence, receptor_sequence, \
        status_date, date_add(status_date, interval %s day) del, \
        ligand_chain, status_init status_change, project_name, status, \
        ligand_ss, ss_psipred FROM user_queue \
        WHERE jid=%s", [todel, job_id], one=True)
    # Fix: guard BEFORE dereferencing -- the original assigned
    # system_info['flexible'] first, so an unknown jid raised TypeError
    # instead of returning the error payload.
    if not system_info:
        return jsonify({'id': job_id, 'status': 'error'})
    flexible = query_db(
        "SELECT constraint_definition FROM constraints WHERE jid=%s", [job_id])
    excluded = query_db(
        "SELECT excluded_region FROM excluded WHERE jid=%s", [job_id])
    # List comprehensions instead of map(): identical on py2, and a real list
    # (JSON-serializable) on py3 where map() returns a lazy iterator.
    system_info['flexible'] = [row['constraint_definition'] for row in flexible]
    system_info['excluded'] = [row['excluded_region'] for row in excluded]
    return jsonify(system_info)
def add_data_to_db(data, console):
    """Create a user_queue row for a freshly submitted job.

    Returns (query_db_result, jid) so the caller can report the new job id.
    """
    jid = unique_id()
    pdb = read_sequence_from_content_and_save(data['file_content'], jid)
    console_content = '' if console is None else console.read()
    row = [jid, data['email'], pdb, data['ligand_seq'], data['ligand_ss'],
           data['show'], data['project_name'], data['simulation_cycles'],
           'pre_queue', console_content]
    inserted = query_db(
        "INSERT INTO user_queue(jid, email, receptor_sequence, "
        "ligand_sequence, ligand_ss, hide, project_name,simulation_length,"
        "status,console) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)",
        row, insert=True)
    return (inserted, jid)
def status(job_id):
    """Return {'status': ...} JSON for a job, or an error stub for unknown jids."""
    todel = str(app.config['DELETE_USER_JOBS_AFTER'])
    row = query_db("SELECT ligand_sequence, receptor_sequence, \
        status_date, date_add(status_date, interval %s day) del, \
        ligand_chain, status_init status_change, project_name, status, \
        ligand_ss, ss_psipred FROM user_queue \
        WHERE jid=%s", [todel, job_id], one=True)
    if row:
        return jsonify({'status': row['status']})
    return jsonify({'id': job_id, 'status': 'error'})
def delete_old():
    """Purge user jobs (working directory + DB rows) older than
    DELETE_USER_JOBS_AFTER days.

    The example job's status_date serves as a lower bound so the example
    job itself is never removed.
    """
    days = str(app.config['DELETE_USER_JOBS_AFTER'])
    to_delete = []
    for row in query_db("select jid from user_queue where status_date > \
            (select status_date from user_queue where jid=%s) and \
            status_date <= now() - interval %s day",
            [app.config['EXAMPLE_JOB'], days]):
        jid = row['jid']
        to_delete.append(jid)
        # ignore_errors: a missing job directory previously aborted the
        # whole cleanup run with an OSError.
        rmtree(path.join(app.config['USERJOB_DIRECTORY'], jid),
               ignore_errors=True)
    for jid in to_delete:
        query_db("DELETE FROM models_skip WHERE jid=%s", [jid])
        query_db("DELETE FROM constraints WHERE jid=%s", [jid])
        # Fix: excluded-region rows were never cleaned up and accumulated
        # as orphans (add_excluded inserts them per jid).
        query_db("DELETE FROM excluded WHERE jid=%s", [jid])
        query_db("DELETE FROM user_queue WHERE jid=%s", [jid], insert=True)
    return Response("HEIL ERIS!", mimetype='text/plain')
def login():
    """Handle the login form; set username/logged_in cookies on success."""
    error = None
    if 'logged_in' in request.cookies and 'username' in request.cookies:
        # Fix: cookie values are strings -- the old `== 1` (int) comparison
        # could never be true, so logged-in users were re-prompted.
        if request.cookies.get('logged_in') == '1':
            return redirect(url_for('hello'))
    if request.method == 'POST':
        email = escape(request.form['email'])
        password = escape(request.form['password'])
        hashed = util.secure_md5_hashing(password)
        # Fix: parameterized query. The previous version interpolated
        # user-controlled values with str.format() (SQL injection) and
        # compared against a literal placeholder instead of the computed
        # hash.  NOTE(review): md5 for passwords is itself weak -- consider
        # a proper KDF (bcrypt/scrypt/argon2).
        results = config.query_db(
            "select username from users where email=? and password=? limit 1",
            [email, hashed], one=True)
        if results:
            flash('You were successfully logged in', 'success')
            resp = app.make_response(redirect(url_for('hello')))
            resp.set_cookie('username', value=results['username'])
            resp.set_cookie('logged_in', value='1')
            return resp
        else:
            error = 'Invalid credentials'
    return render_template('login.html', error=error)
def get_all_posts():
    """Return every published post row."""
    return config.query_db("select * from posts WHERE published = 1")
def get_post(slug):
    """Fetch a single post by its id (passed in as *slug*), or None."""
    return config.query_db("select * from posts p where p.id=?",
                           [slug], one=True)
def get_job_all(jid):
    """Build the full JSON report for a job.

    Combines the user_queue row with the gzip-compressed model, cluster and
    replica files found on disk.  A JSON POST body may narrow the result via
    'filter' (a cluster-stats column name) with 'min'/'max' bounds; otherwise
    the first ten models are returned.
    """
    todel = str(app.config['DELETE_USER_JOBS_AFTER'])
    system_info = query_db("SELECT ligand_sequence, receptor_sequence, \
        status_date, date_add(status_date, interval %s day) del, \
        ligand_chain, status_init status_change, project_name, status, \
        ligand_ss, ss_psipred FROM user_queue \
        WHERE jid=%s", [todel, jid], one=True)
    if not system_info:
        return jsonify({'status': 'error'})
    models = {'models': [], 'clusters': [], 'replicas': []}
    udir_path = os.path.join(app.config['USERJOB_DIRECTORY'], jid)
    # NOTE(review): cluster_stats presumably returns one dict per cluster,
    # index-aligned with the sorted file lists below -- confirm.
    clust_details = cluster_stats(jid)
    # Collect the basenames of the .gz artefacts per category, natural-sorted.
    for d in ['models', 'replicas', 'clusters']:
        tm = [fil.split("/")[-1] for fil in glob(udir_path + "/" + d + "/*.gz")]
        models[d] = sorted(tm, key=alphanum_key)
    range_list = xrange(0, 10)  # default selection: first ten models
    if request.method == 'POST' and request.json:
        value = request.json.get('filter')  # po cluster_stats
        # min/max shadow the builtins here; kept as-is.
        min = request.json.get('min')
        max = request.json.get('max')
        # Select cluster ids whose stat column lies in (min, max].
        if value and min and max:
            filtered_results = filter(lambda x: float(x[value]) > float(min) and float(x[value]) <= float(max), clust_details)
            range_list = map(lambda x: x['id'], filtered_results)
        elif value and min:
            filtered_results = filter(lambda x: float(x[value]) > float(min), clust_details)
            range_list = map(lambda x: x['id'], filtered_results)
        elif value and max:
            filtered_results = filter(lambda x: float(x[value]) <= float(max), clust_details)
            range_list = map(lambda x: x['id'], filtered_results)
        else:
            range_list = xrange(0, 10)
    results = []
    for i in range_list:
        # Read each gzipped artefact fully into memory for the JSON payload.
        path_dir = os.path.join(app.config['USERJOB_DIRECTORY'], jid,
                                "models", models['models'][i])
        with gzip.open(path_dir) as data:
            file_content_model = data.read()
        path_dir = os.path.join(app.config['USERJOB_DIRECTORY'], jid,
                                "clusters", models['clusters'][i])
        with gzip.open(path_dir) as data:
            file_content_cluster = data.read()
        path_dir = os.path.join(app.config['USERJOB_DIRECTORY'], jid,
                                "replicas", models['replicas'][i])
        with gzip.open(path_dir) as data:
            file_content_replica = data.read()
        results.append({
            'jobid': jid,
            'info': system_info,
            'model': i + 1,  # 1-based model number for the UI
            'model_data': file_content_model,
            'cluster_data': file_content_cluster,
            'trajectory_data': file_content_replica,
            'cluster_density': clust_details[i]["density"],
            'average_rmsd': clust_details[i]["rmsd"],
            'max_rmsd': clust_details[i]["maxrmsd"],
            'elements': clust_details[i]["counts"]
        })
    result = {'models': results}
    return jsonify(result)
def parse_server_talking(task, secret_key, jid):
    """Dispatch commands sent by the compute backend.

    Requests must present the shared secret AND originate from a whitelisted
    IP (the "firewall").  `task` selects either a status transition (S_Q /
    S_E / S_R / S_D, with notification mails), a data upload (LOAD, SENDSS,
    LIGCHAIN, SEND), or a JSON read-back of job parameters.  Unless a branch
    returns JSON, the response is the plain-text acknowledgement.

    Fix: the original had a second, unreachable `elif task == "MSG"` branch
    (duplicate of the first); it has been removed.
    """
    k = config['REMOTE_SERVER_SECRET_KEY']
    if k == secret_key and request.remote_addr in config['REMOTE_SERVER_IP']:
        # added "firewall" (he he) for selected IP only
        # status commander
        if task == 'S_Q':  # job entered the real queue
            query_db("UPDATE user_queue set status='queue', \
                status_init=now() WHERE jid=%s", [jid], insert=True)
        elif task == 'S_E':  # job error -> inform admin and (if set) the user
            query_db("UPDATE user_queue set status='error',\
                status_init=now() WHERE jid=%s", [jid], insert=True)
            tomail = query_db("SELECT email FROM user_queue WHERE jid=%s",
                              [jid], one=True)
            send_mail(subject="error " + jid)
            if 'email' in tomail and len(tomail['email']) > 1:
                send_mail(to=tomail['email'], subject="error with job",
                          body='Your job (' + jid +
                               ') got error status. Robot informed admin.')
        elif task == 'S_R':  # job running
            query_db("UPDATE user_queue set status='running', \
                status_init=now() WHERE jid=%s", [jid], insert=True)
            tomail = query_db("SELECT email FROM user_queue WHERE jid=%s",
                              [jid], one=True)
            if 'email' in tomail and len(tomail['email']) > 1:
                send_mail(to=tomail['email'], subject="Job is running: " + jid,
                          body="Wait for second mail about job done "
                               "(or job error).")
        elif task == 'S_D':  # job done
            query_db("UPDATE user_queue set status='done', \
                status_init=now() WHERE jid=%s", [jid], insert=True)
            q = query_db("SELECT email,project_name FROM user_queue \
                WHERE jid=%s", [jid], one=True)
            if 'email' in q and len(q['email']) > 1:
                send_mail(to=q['email'],
                          subject="Job " + q['project_name'] + " completed",
                          body="Get results: " +
                               url_for('job_status', jid=jid, _external=True) +
                               " . Thanks for using our server")
        elif task == "MSG":  # relay a backend debug/error message to admin
            msg = request.form['msg']
            send_mail(subject="DEBUG/ERROR: " + msg)
        elif task == "LOAD" and request.method == 'POST':
            # single-row load gauge for the compute host
            load = int(request.form['load'])
            hostname = request.form['hostname']
            query_db("UPDATE server_load SET `load`=%s, `name`=%s,\
                `status_date`=now() WHERE id=0", [load, hostname], insert=True)
        elif task == "SENDSS" and request.method == 'POST':
            # store psipred secondary-structure prediction
            ss = request.form['ss']
            query_db("UPDATE user_queue set ligand_ss=%s,ss_psipred=1 \
                WHERE jid=%s", [ss, jid], insert=True)
        elif task == "LIGCHAIN" and request.method == 'POST':
            ss = request.form['chain']
            query_db("UPDATE user_queue set ligand_chain=%s \
                WHERE jid=%s", [ss, jid], insert=True)
        elif task == "LIGANDSEQ":
            t = query_db("SELECT ligand_sequence,ligand_ss FROM user_queue \
                WHERE jid=%s", [jid], one=True)
            out = {'sequence': 'JESTEM', 'secstr': 'HAkER3M'}
            if t:
                out = {'sequence': t['ligand_sequence'],
                       'secstr': t['ligand_ss']}
            return Response(json.dumps(out), mimetype='application/json')
        elif task == "SCALFACTOR":
            t = query_db("SELECT constraints_scaling_factor FROM user_queue \
                WHERE jid=%s", [jid], one=True)
            return Response(json.dumps({'constraints_scaling_factor':
                                        t['constraints_scaling_factor']}),
                            mimetype='application/json')
        elif task == "LENGTH":
            t = query_db("SELECT simulation_length FROM user_queue \
                WHERE jid=%s", [jid], one=True)
            return Response(json.dumps({'sim_length': t['simulation_length']}),
                            mimetype='application/json')
        elif task == "JOBNAME":
            t = query_db("SELECT project_name FROM user_queue WHERE jid=%s",
                         [jid], one=True)
            return Response(json.dumps({'jobname': t['project_name']}),
                            mimetype='application/json')
        elif task == "RESTRAINTS":
            t = query_db("SELECT `constraint_definition`,`force` FROM constraints \
                WHERE jid=%s", [jid])
            out = [{'def': row['constraint_definition'], 'force': row['force']}
                   for row in t]
            return Response(json.dumps(out), mimetype='application/json')
        elif task == "CONSOLE":
            t = query_db("SELECT console FROM user_queue WHERE jid=%s",
                         [jid], one=True)
            return Response(json.dumps(t), mimetype='application/json')
        elif task == "EXCLUDED":
            t = query_db("SELECT excluded_region FROM excluded \
                WHERE jid=%s", [jid])
            out = [{'excluded': row['excluded_region']} for row in t]
            return Response(json.dumps(out), mimetype='application/json')
        elif task == 'SKIPMODELS':
            t = query_db("SELECT model_id, removed_model, prev_jid \
                FROM models_skip WHERE jid=%s", [jid])
            # mozliwe ze ta konwersja nie jest konieczna TODO
            # (translation: this conversion may not be necessary)
            out = []
            for row in t:
                out.append({'model_id': row['model_id'],
                            'prev_jid': row['prev_jid'],
                            'model_body': row['removed_model']})
            return Response(json.dumps(out), mimetype='application/json')
        elif task == "SEND" and request.method == 'POST':
            # receive result files from the backend into the job directory
            user_dir = path.join(app.config['USERJOB_DIRECTORY'], jid)
            for d in ["models", 'clusters', 'replicas']:
                if not path.exists(path.join(user_dir, d)):
                    makedirs((path.join(user_dir, d)))
            for file in request.files.keys():
                filename = file
                request.files[file].save(path.join(user_dir, filename))
    return Response("HEIL ERIS!", mimetype='text/plain')
def index_queue():
    """Plain-text list of jids still waiting in the pre-queue, one per line."""
    rows = query_db("SELECT jid FROM user_queue WHERE status='pre_queue'")
    body = "\n".join(str(row['jid']) for row in rows)
    return Response(body, mimetype='text/plain')