def fetch_learning(request):
    """Query the ML sharing API for learned prediction equations.

    Builds a comma-separated query from the step hash, resource type and the
    host's cpu/memory/os fingerprint, stores the returned a/b/r coefficients
    in the session under 'learning', and renders the fetch_learning template.

    On any failure the API URL (not the exception) is returned so the user
    can see which endpoint was queried.
    """
    import urllib2
    import json
    # hash,type,cpu,memory,os — the order the remote API expects.
    query_string = ','.join([
        request.GET['hash'],
        request.GET['type'],
        str(get_config('env', 'cpu')),
        str(get_config('env', 'memory')),
        str(os_to_int()),
    ])
    api_bus = get_config('ml', 'api') + '/Index/share/q/' + query_string
    try:
        req = urllib2.Request(api_bus)
        res_data = urllib2.urlopen(req)
        try:
            res = json.loads(res_data.read())
        finally:
            # The original leaked the HTTP response object.
            res_data.close()
        session_dict = {
            'hash': request.GET['hash'],
            'type': request.GET['type'],
            'a': res['a'],
            'b': res['b'],
            'r': res['r'],
        }
        request.session['learning'] = session_dict
        template = loader.get_template('ui/fetch_learning.html')
        context = RequestContext(request, {
            'step': res,
        })
        return success(template.render(context))
    except Exception:
        # Deliberate: report the URL that failed rather than the exception.
        return error(api_bus)
def delete_job_file(request, f):
    """Delete a file inside the requesting user's workspace folder.

    :param f: base64-encoded path relative to the user's workspace.
    :return: success('Deleted') on completion, error(...) on a bad path.
    """
    import base64
    user_root = os.path.join(get_config('env', 'workspace'),
                             str(request.user.id))
    file_path = os.path.join(user_root, base64.b64decode(f))
    # SECURITY: the decoded name is user-controlled; reject anything that
    # escapes the user's own workspace (e.g. '../..' traversal).
    if not os.path.abspath(file_path).startswith(os.path.abspath(user_root)):
        return error('Invalid file path.')
    delete_file(file_path)
    return success('Deleted')
def add_job(request):
    """Validate a SingleJobForm POST and enqueue one job.

    Only the protocol owner or a superuser may enqueue against a protocol,
    and the user must be under the disk quota. Non-POST requests fall
    through and return None (unchanged from the original).
    """
    if request.method == 'POST':
        job_form = SingleJobForm(request.POST)
        if job_form.is_valid():
            cd = job_form.cleaned_data
            try:
                # The scheduler expects the parameter list to end with ';'.
                if cd['parameter'].find(';') == -1:
                    cd['parameter'] += ';'
                protocol = ProtocolList.objects.get(id=cd['protocol'])
                if protocol.check_owner(
                        request.user.id) or request.user.is_superuser:
                    job = Queue(
                        protocol_id=cd['protocol'],
                        parameter=cd['parameter'],
                        run_dir=get_config('env', 'workspace'),
                        user_id=request.user.id,
                        input_file=cd['input_files'],
                    )
                    if check_disk_quota_lock(request.user.id):
                        job.save()
                        return success('Successfully added job into queue.')
                    else:
                        return error(
                            'You have exceeded the disk quota limit! Please delete some files!'
                        )
                else:
                    return error('You are not owner of the protocol.')
            except Exception as e:
                return error(e)
        return error(str(job_form.errors))
def get_disk_quota_info(user):
    """Return (quota, used, percent-used) for *user*'s disk allowance.

    :return: tuple (disk_pool, disk_used, disk_perc); all zeros when the
             quota is unset/non-numeric or the size lookup fails.
    """
    try:
        disk_pool = int(get_config('env', 'disk_quota'))
        disk_used = get_user_folder_size(user)
        # Multiply before dividing with a float: under Python 2 the original
        # integer division truncated, reporting 0% until the quota was full.
        disk_perc = int(round(disk_used * 100.0 / disk_pool))
    except Exception:
        # Best-effort: any failure (missing config, zero quota, I/O error)
        # reports an empty quota rather than crashing the page.
        disk_pool = disk_used = disk_perc = 0
    return disk_pool, disk_used, disk_perc
def handle_uploaded_file(f):
    """Save an uploaded job-list file under a random name.

    :param f: Django UploadedFile; written chunk-by-chunk.
    :return: absolute path of the stored file.
    """
    import os
    target = os.path.join(get_config('env', 'batch_job'),
                          rand_sig() + '.txt')
    with open(target, 'wb+') as out:
        for piece in f.chunks():
            out.write(piece)
    return target
def show_upload_files(request):
    """Ensure the user's uploads folder exists, creating it on demand.

    Returns an error page if the folder cannot be created; otherwise falls
    through and returns None (unchanged from the original — presumably the
    actual listing/rendering happens elsewhere; verify against callers).
    """
    user_path = os.path.join(get_config('env', 'workspace'),
                             str(request.user.id), 'uploads')
    if not os.path.exists(user_path):
        try:
            os.makedirs(user_path)
        except Exception as e:
            return render(request, 'ui/error.html', {'error_msg': e})
def check_disk_quota_lock(user):
    """Return 1 when *user* may still write data, 0 when over quota.

    An unset/empty 'disk_quota' setting disables quota enforcement, in
    which case writing is always allowed.
    """
    quota = get_config('env', 'disk_quota')
    if not quota:
        return 1
    return 1 if get_user_folder_size(user) < int(quota) else 0
def download_upload_file(request, f):
    """Stream a file from the user's uploads folder as an attachment.

    :param f: base64-encoded file name, optionally prefixed with 'f/'.
    :return: FileResponse with download headers, or error(...) on failure.
    """
    import base64
    user_root = os.path.join(get_config('env', 'workspace'),
                             str(request.user.id), 'uploads')
    file_path = os.path.join(user_root,
                             base64.b64decode(f.replace('f/', '')))
    # SECURITY: the decoded name is user-controlled; refuse anything that
    # resolves outside the user's uploads folder ('..' traversal).
    if not os.path.abspath(file_path).startswith(os.path.abspath(user_root)):
        return error('Invalid file path.')
    try:
        # FileResponse takes ownership of the handle and closes it.
        response = FileResponse(open(file_path, 'rb'))
        response['Content-Type'] = 'application/octet-stream'
        response['Content-Disposition'] = 'attachment;filename="{0}"'.format(
            os.path.basename(file_path))
        response['Content-Length'] = os.path.getsize(file_path)
        return response
    except Exception as e:
        return error(e)
def show_job_log(request):
    """Return the newest 100 lines of a job's log, newest first (AJAX).

    POST with a JobManipulateForm naming the job; lines are joined with
    '<br />' for direct insertion into the page.
    """
    if request.method == 'POST':
        query_job_form = JobManipulateForm(request.POST)
        if query_job_form.is_valid():
            cd = query_job_form.cleaned_data
            log_path = os.path.join(get_config('env', 'log'), str(cd['job']))
            try:
                # 'with' closes the handle even if reading raises — the
                # original leaked it on any error between open and close.
                with open(log_path, mode='r') as log_file:
                    log = log_file.readlines()
                log.reverse()
                log = log[:100]
                log_content = '<br />'.join(log)
                return success(log_content)
            except Exception as e:
                return error(e)
        else:
            return error(str(query_job_form.errors))
def show_workspace(request):
    """Render a listing of the user's uploaded files.

    Each entry carries the name, size, creation time and a base64 'trace'
    token that the download/delete views later decode back into a path.
    """
    import time
    import base64
    user_files = []
    user_path = os.path.join(get_config('env', 'workspace'),
                             str(request.user.id), 'uploads')
    # The uploads folder is created lazily elsewhere; treat a missing
    # folder as an empty workspace instead of letting os.listdir raise.
    entries = os.listdir(user_path) if os.path.isdir(user_path) else []
    for file_name in entries:
        file_path = os.path.join(user_path, file_name)
        tmp = dict()
        tmp['name'] = file_name
        tmp['file_size'] = os.path.getsize(file_path)
        tmp['file_create'] = time.ctime(os.path.getctime(file_path))
        tmp['trace'] = base64.b64encode(file_name)
        user_files.append(tmp)
    context = {'user_files': user_files}
    return render(request, 'ui/show_workspace.html', context)
def show_job_folder(request):
    """Render a recursive file listing of a job's result folder (AJAX).

    POST with a JobManipulateForm; only the job owner or a superuser may
    view. Each entry has the path relative to the result folder, size,
    creation time, and a base64 'trace' used later for download.
    """
    import time
    import base64
    if request.method == 'POST':
        query_job_form = JobManipulateForm(request.POST)
        if query_job_form.is_valid():
            cd = query_job_form.cleaned_data
            try:
                job = Queue.objects.get(id=cd['job'])
                if job.check_owner(
                        request.user.id) or request.user.is_superuser:
                    result_folder = job.get_result()
                    user_path = os.path.join(get_config('env', 'workspace'),
                                             str(request.user.id),
                                             result_folder)
                    user_files = []
                    for root, dirs, files in os.walk(user_path):
                        for file_name in files:
                            file_full_path = os.path.join(root, file_name)
                            # Strip the workspace prefix with either
                            # separator style to get a relative path.
                            file_path = file_full_path.replace(user_path+'\\', '')\
                                .replace(user_path+'/', '').replace(user_path, '')
                            tmp = dict()
                            tmp['name'] = file_path
                            tmp['file_size'] = os.path.getsize(file_full_path)
                            tmp['file_create'] = time.ctime(
                                os.path.getctime(file_full_path))
                            tmp['trace'] = base64.b64encode(
                                os.path.join(result_folder, file_path))
                            user_files.append(tmp)
                    template = loader.get_template('ui/show_job_folder.html')
                    context = RequestContext(request, {
                        'user_files': user_files,
                    })
                    return success(template.render(context))
                else:
                    return error('You are not the owner of the job.')
            except Exception as e:
                return error(e)
        else:
            return error(str(query_job_form.errors))
def settings(request):
    """Render (GET) and persist (POST) the global configuration page.

    POST writes env/ml settings and, when a sender address is supplied,
    the mail settings (toggling mail notification on/off accordingly),
    then redirects back. GET collects current values plus disk totals.
    """
    if request.method == 'POST':
        set_config('env', 'workspace', request.POST['path'])
        set_config('env', 'cpu', request.POST['cpu'])
        set_config('env', 'memory', request.POST['mem'])
        set_config('env', 'disk_quota', request.POST['dquota'])
        set_config('ml', 'confidence_weight_disk', request.POST['dcw'])
        set_config('ml', 'confidence_weight_mem', request.POST['mcw'])
        set_config('ml', 'confidence_weight_cpu', request.POST['ccw'])
        set_config('ml', 'threshold', request.POST['ccthr'])
        # Mail notification is enabled iff a sender address was supplied.
        if request.POST['sender'] != '':
            set_config('mail', 'notify', 'on')
            set_config('mail', 'sender', request.POST['sender'])
            set_config('mail', 'mail_host', request.POST['mailhost'])
            set_config('mail', 'mail_port', request.POST['mailport'])
            set_config('mail', 'mail_user', request.POST['mailuser'])
            set_config('mail', 'mail_password', request.POST['mailpassword'])
        else:
            set_config('mail', 'notify', 'off')
        return HttpResponseRedirect('/ui/settings')
    else:
        try:
            workspace = get_config('env', 'workspace')
            free_bytes = get_disk_free(workspace)
            total_bytes = free_bytes + get_disk_used(workspace)
            configuration = {
                'run_folder': workspace,
                'cpu': get_config('env', 'cpu'),
                'memory': get_config('env', 'memory'),
                'disk_quota': get_config('env', 'disk_quota'),
                'threshold': get_config('ml', 'threshold'),
                'disk_confidence_weight': get_config('ml', 'confidence_weight_disk'),
                'mem_confidence_weight': get_config('ml', 'confidence_weight_mem'),
                'cpu_confidence_weight': get_config('ml', 'confidence_weight_cpu'),
                # 1073741824.0 = 1 GiB; float divisor avoids the Python 2
                # truncating integer division before round().
                'max_disk': round(total_bytes / 1073741824.0),
                'free_disk': round(free_bytes / 1073741824.0),
                'sender': get_config('mail', 'sender'),
                'mail_host': get_config('mail', 'mail_host'),
                'mail_port': get_config('mail', 'mail_port'),
                'mail_user': get_config('mail', 'mail_user'),
                'mail_password': get_config('mail', 'mail_password'),
            }
        except Exception as e:
            return render(request, 'ui/error.html', {'error_msg': e})
        return render(request, 'ui/settings.html', configuration)
def export_protocol(request):
    """Export a protocol and its steps (with learned equations) as JSON.

    GET with ?id=<protocol id>; only the owner or a superuser may export.
    For each step the a/b/r regression coefficients for disk (type 1),
    memory (type 2) and cpu (type 3) predictions are attached when
    available; steps without predictions export software/parameter only.
    """
    if request.method == 'GET':
        if 'id' in request.GET:
            protocol_data = dict()
            try:
                protocol_parent = ProtocolList.objects.get(
                    id=int(request.GET['id']))
            except ProtocolList.DoesNotExist:
                from django.http import Http404
                raise Http404("Protocol does not exist")
            protocol_data['name'] = protocol_parent.name
            protocol_data['step'] = []
            if protocol_parent.check_owner(
                    request.user.id) or request.user.is_superuser:
                steps = Protocol.objects.filter(parent=int(request.GET['id']))
                for step in steps:
                    try:
                        equations = Prediction.objects.filter(
                            step_hash=step.hash)
                        cpu_a = cpu_b = cpu_r = mem_a = mem_b = mem_r = disk_a = disk_b = disk_r = 0
                        for equation in equations:
                            # type: 1 = disk, 2 = memory, 3 = cpu
                            if equation.type == 1:
                                disk_a = equation.a
                                disk_b = equation.b
                                disk_r = equation.r
                            elif equation.type == 2:
                                mem_a = equation.a
                                mem_b = equation.b
                                mem_r = equation.r
                            elif equation.type == 3:
                                cpu_a = equation.a
                                cpu_b = equation.b
                                cpu_r = equation.r
                        tmp = {
                            'software': step.software,
                            'parameter': step.parameter,
                            'cpu': get_config('env', 'cpu'),
                            'mem': get_config('env', 'memory'),
                            'os': os_to_int(),
                            'cpu_a': cpu_a,
                            'cpu_b': cpu_b,
                            'cpu_r': cpu_r,
                            'mem_a': mem_a,
                            'mem_b': mem_b,
                            'mem_r': mem_r,
                            'disk_a': disk_a,
                            'disk_b': disk_b,
                            'disk_r': disk_r,
                        }
                    except Exception:
                        # Best-effort: a step with no usable prediction data
                        # still exports its software and parameter.
                        tmp = {
                            'software': step.software,
                            'parameter': step.parameter,
                        }
                    protocol_data['step'].append(tmp)
                return build_json_protocol(protocol_data)
            else:
                return error('You are not owner of the protocol.')
        else:
            return error('Unknown parameter.')
    else:
        return error('Method error.')
def batch_job(request):
    """Create many queue entries from an uploaded tab-separated job list.

    Each line must have three columns: protocol id, input file, parameter.
    Protocol ownership is verified per line (with a small cache so each
    protocol is fetched once), the disk quota is checked per line, and all
    jobs are inserted in one bulk_create before redirecting to the queue.
    """
    if request.method == 'POST':
        form = BatchJobForm(request.POST, request.FILES)
        if form.is_valid():
            file_name = handle_uploaded_file(request.FILES['job_list'])
            try:
                protocol_cache = dict()
                with open(file_name) as f:
                    jobs = f.readlines()
                job_list = []
                for job in jobs:
                    configurations = job.split('\n')[0].split('\t')
                    if len(configurations) == 3:
                        if check_disk_quota_lock(request.user.id):
                            protocol_id = int(configurations[0])
                            # Cache protocol owners so each protocol row is
                            # fetched at most once per upload.
                            if protocol_id not in protocol_cache:
                                try:
                                    protocol = ProtocolList.objects.get(
                                        id=protocol_id)
                                    protocol_cache[
                                        protocol_id] = protocol.user_id
                                except Exception as e:
                                    return render(request, 'ui/error.html',
                                                  {'error_msg': e})
                            if protocol_cache[
                                    protocol_id] == request.user.id or request.user.is_superuser:
                                job_list.append(
                                    Queue(input_file=configurations[1],
                                          parameter=configurations[2],
                                          run_dir=get_config(
                                              'env', 'workspace'),
                                          protocol_id=protocol_id,
                                          user_id=request.user.id))
                            else:
                                return render(
                                    request, 'ui/error.html', {
                                        'error_msg':
                                        'You are not the owner of the protocol(%s)'
                                        % protocol_id
                                    })
                        else:
                            return render(
                                request, 'ui/error.html', {
                                    'error_msg':
                                    'You have exceeded the disk quota limit! Please delete some files!'
                                })
                    else:
                        return render(
                            request, 'ui/error.html', {
                                'error_msg':
                                'Your job list file must contain three columns.'
                            })
                Queue.objects.bulk_create(job_list)
                return HttpResponseRedirect('/ui/query-job')
            except Exception as e:
                return render(request, 'ui/error.html', {'error_msg': e})
        else:
            return render(request, 'ui/error.html',
                          {'error_msg': str(form.errors)})