def view(request, project, build, test_id, case_name, attachment_name):
    """Render a stack-bar chart page for one test attachment.

    Picks the chart template by project; for legacy projects the log
    file is sniffed for the newer 'step_' marker to choose between the
    new and old stack-bar templates.
    """
    api_link = '/'.join([project, build, test_id, case_name, attachment_name])
    path = os.path.join(settings.BASE_DIR, 'data', 'data', project, 'raw',
                        build, test_id, case_name, attachment_name)
    context = {'link': api_link}
    # These two projects previously had identical, duplicated branches.
    if project in ('bxtp_ivi_m', 'mmr1_bxtp_ivi_maint'):
        return new_render(request, 'stackbar_m.html', context=context,
                          title='CP0', subtitle=None, wide=True)
    elif project == 'upload':
        return new_render(request, 'stackbar.html', context=context,
                          title='CP0', subtitle=None, wide=True)
    # Only this branch needs the log contents; the original read the
    # file unconditionally and leaked the handle.
    with open(path) as fh:
        log_file = fh.read()
    if 'step_' in log_file:
        return new_render(request, 'stackbar.html', context=context,
                          title='CP0', subtitle=None, wide=True)
    return new_render(request, 'stackbar_old.html', context=context,
                      title='CP0', subtitle=None, wide=True)
def slave(request, slave_id):
    """Render the per-slave workspace page (slave_id is currently unused)."""
    return new_render(request, 'workspace.slave.html', context={},
                      title='CP0', subtitle='My Workspace')
def view_log(request, text):
    """Render the log file at path ``text``, or the 500 page.

    The file is rendered only when the path has a non-empty final
    '.'-separated component (i.e. it does not end with a dot).
    """
    data = text.split('.')[-1]
    if data:
        # with-block closes the handle (the original leaked it).
        with open(text) as fh:
            text_log = fh.read().decode('utf-8')
        return new_render(request, 'log_view.html', {'content': text_log})
    else:
        return render_to_response('500.html')
def add_slave(request):
    """Register a new private slave for the current user.

    POST flow: validate the SlaveForm, exchange ssh keys via an expect
    script, copy install.sh to the target with sshpass/scp, then kick
    off the remote install in the background.  Failure paths redirect
    to the dashboard or re-render the profile page with form errors.
    """
    if request.method == 'POST':
        slave = Slave()
        slave.owner = request.user
        slave.install_status = 0
        slave_f = SlaveForm(request.POST, instance=slave)
        if slave_f.is_valid():
            username = request.POST["user_name"]
            ip = request.POST["ip"]
            passwd = request.POST["passwd"]
            # SECURITY(review): username/ip/passwd come straight from
            # the POST body and are interpolated into shell=True command
            # strings here and below -- shell-injection risk; the
            # password is also visible in the process list.
            p = subprocess.Popen(
                "expect ./scripts/autossh.sh {0} {1} {2}".format(
                    username, ip, passwd),
                stdout=subprocess.PIPE, shell=True)
            start = datetime.datetime.now()
            timeout = 3  # seconds allowed for the key-exchange script
            while p.poll() is None:
                time.sleep(0.2)
                now = datetime.datetime.now()
                # NOTE(review): .seconds (not total_seconds()) is fine
                # for a 3s timeout but wrong across day boundaries.
                if (now - start).seconds > timeout:
                    p.terminate()
                    return HttpResponseRedirect('/dashboard/')
            p.stdout.readlines()  # drain output; result ignored
            ret = os.system(
                "sshpass -p {0} scp {1}/install.sh {2}@{3}:/tmp/".format(
                    passwd, settings.SCRIPT_PATH, username, ip))
            if ret == 0:
                slave_f.save()
                # Fire-and-forget remote install; the handle is never
                # waited on or read.
                p_install = subprocess.Popen(
                    "sshpass -p {0} ssh -t {1}@{2} 'echo {0} | sudo -S cp /tmp/install.sh /usr/local/bin/slavemanger && slavemanger -c -g -i -p {0}'".format(
                        passwd, username, ip),
                    stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE, shell=True)
                return HttpResponseRedirect('/dashboard/')
        else:
            # Invalid form: re-render the profile page with the bound
            # (error-carrying) form.  NOTE(review): this else is
            # reconstructed as belonging to is_valid(); a failed scp
            # (ret != 0) then falls through to the final redirect --
            # confirm against the original layout.
            private_slave = Slave.objects.filter(policy='1',
                                                 owner=request.user.id)
            SlaveFormSet = modelformset_factory(Slave, form=SlaveForm,
                                                max_num=1)
            context = {
                'slaves': private_slave,
                'formsets': [SlaveFormSet(
                    queryset=Slave.objects.filter(id=slave.id))
                    for slave in private_slave],
                'newslavefrom': slave_f
            }
            return new_render(request, 'profile.html', context=context,
                              subtitle='Profile')
    return HttpResponseRedirect('/dashboard/')
def index(request):
    """Render the CP dashboard: redis key dump, disk usage, mongo status."""
    redis_dict = {}
    for key in rds.keys():
        # Hoist rds.type(key): the original issued the TYPE command
        # twice per key.
        ktype = rds.type(key)
        if ktype == 'list':
            redis_dict[key] = ', '.join(rds.lrange(key, 0, -1))
        elif ktype == 'string':
            redis_dict[key] = rds.get(key)
    # df: disk usage per device; faked in dev mode.
    if 'cp0_dev' in os.environ:
        fss = [['/dev/sda1', '30', '9', '21', '30%', 'fake data in dev mode']]
    else:
        df_list = ['/dev/sda2', '/dev/sdb']
        # One `df -h` per device: keep the second output line, split
        # into columns.  Comprehension replaces map/lambda (same list).
        fss = [subprocess.Popen('df -h ' + dev, shell=True,
                                stdout=subprocess.PIPE).communicate()[0]
               .split("\n")[1].split()
               for dev in df_list]
    context = {
        'branch_url': branch_url,
        'branch_nodes': branch_nodes,
        'redis_dict': OrderedDict(sorted(redis_dict.items(),
                                         key=lambda t: t[0])),
        'fss': fss,
        'mongo_st': mongodb_unavailable()
    }
    return new_render(request, 'cp.html', context=context)
def view_attach(request, data):
    """Dispatch an attachment to the video viewer or the chart viewer."""
    page_title = 'CP0'
    if 'mp4' not in data:
        # Non-video attachments are parsed as CSV and charted.
        chart_data = CSV_PARSE(data).bar_total()
        return new_render(request, "charts_common.html",
                          {'alldata': chart_data,
                           "title_name": 'Chart View'},
                          title=page_title, subtitle='VIEW_CHARTS')
    return new_render(request, "video_view.html", {"video_url": data},
                      title=page_title, subtitle='VIEW_VIDEOS')
def attachments(request, project, build, test_id, case_name, attachment_name):
    """Serve one raw test attachment, chosen by file extension.

    png/jpg -> inline base64 image page; h5 -> raw bytes; archives and
    logcat files -> streamed download; anything else -> text viewer,
    falling back to a download when the file is not valid UTF-8.
    Missing files render the 500 page.

    Fixes: three leaked file handles (now ``with`` blocks), a stray
    debug print, an unused ``template`` local, and duplicated download
    response code (now a shared helper).
    """
    file_path = os.path.join(data_path, project, 'raw', build, test_id,
                             case_name, attachment_name)
    file_ext = attachment_name.split('.')[-1]
    if not os.path.exists(file_path):
        return render_to_response('500.html')

    def _download():
        # Stream the file as a named attachment (shared by two branches).
        response = StreamingHttpResponse(file_iterator(file_path))
        response['Content-Type'] = 'text/plain'
        response['Content-Disposition'] = \
            'attachment;filename="{0}"'.format(attachment_name)
        return response

    if file_ext == 'png' or file_ext == 'jpg':
        # mode 'r' reads raw bytes as str on py2/POSIX, matching the
        # original behavior.
        with open(file_path, 'r') as fh:
            encoded = base64.b64encode(fh.read())
        return new_render(request, 'pic_viewer.html', {
            'content': 'data:image/' + file_ext + ';base64,' + encoded
        })
    elif file_ext == 'h5':
        with open(file_path, 'r') as fh:
            content = fh.read()
        return HttpResponse(content)
    elif file_ext in ['zip', 'gz', 'tgz'
                      ] or attachment_name.startswith('logcat'):
        return _download()
    else:
        try:
            with open(file_path, 'r') as fh:
                content = {'content': fh.read().decode('utf-8')}
            return new_render(request, 'text_viewer.html', content,
                              title='Log Viewer', subtitle=attachment_name)
        except UnicodeDecodeError:
            # Binary content: hand it over as a download instead.
            return _download()
def metrix_index(request):
    """Render the METRIX landing page with a fresh per-visit data dir name."""
    prev_label = 'searchTree_'
    # Unique directory name: label + timestamp + random fractional digits.
    suffix = str(random.random()).split('.')[-1]
    out_dir = '{0}{1}_{2}'.format(prev_label,
                                  time.strftime('%Y%m%d_%H%M%S'),
                                  suffix)
    data_dict = get_metrix_selection_menu_info()
    data_dict['searchTree'] = out_dir
    # Drop stale temp dirs left over from previous visits.
    rm_tmp_data_dir(out_dir, prev_label)
    return new_render(request, 'metrix_index.html', data_dict,
                      title='CP0', subtitle='METRIX', wide=True)
def mytask(request):
    """Render the 'my tasks' page (task log + favorites + active tab).

    Fix: the original defaulted ``p`` to the *string* "1" but produced
    an int when ``?page=`` was present; the default is now the int 1 so
    downstream pagination always receives a consistent type.
    """
    p = int(request.GET.get('page', 1))
    tab = request.GET.get('tab', "1")
    uid = request.user.id
    return new_render(
        request, 'mytask.html', {
            "mytasks": getmytask(p, uid),
            "myfavorites": getmyfavorites(p, uid),
            "tab": tab
        })
def result_compare(request):
    """Render the comparison table for the result links stored in session."""
    cmp_links = request.session['cmp']
    table = GeneralTestResultCompare(cmp_links)
    # The 4th path segment of the first link is the case name.
    return new_render(request, 'general_compare.html',
                      context={
                          'data': table.result(),
                          'header': table.header(),
                          'case': cmp_links[0].split('/')[3],
                      },
                      title='CP0', subtitle='Compare')
def customize(request, tag, project, build=None):
    """Render the daily-style page for a customize tag of a project.

    ``build`` defaults to the newest build under the tag directory.
    Unknown tag dir -> 404 page; no builds or unknown project -> empty
    page.
    """
    customize_dir = os.path.join(data_path, project, tag)
    if not os.path.exists(customize_dir):
        return page_not_found(request)
    current_build = build
    if project in os.listdir(data_path):
        # sorted() over a comprehension replaces filter()+list.sort():
        # identical result, independent of filter()'s py2/py3 semantics.
        builds = sorted(d for d in os.listdir(customize_dir)
                        if os.path.isdir(os.path.join(customize_dir, d)))
        if len(builds):
            if not current_build:
                current_build = builds[-1]
            # NOTE(review): when current_build is the first build this
            # index (-1) wraps around to the newest build -- confirm
            # that is intended.
            prev_build = builds[builds.index(current_build) - 1]
            # SUMMARY
            summary = TagSummary(tag_name=tag,
                                 build=current_build,
                                 prev_build=prev_build,
                                 project_name=project)
            # DETAIL: one RawData2 per (test_id, file_name) result pair.
            raw_data = [RawData2(project, current_build, test_id, file_name)
                        for test_id, file_name in summary.get_test_result()]
            context = {
                'tag': tag,
                'project': project,
                'builds': builds,
                'current_build': current_build,
                'prev_build': prev_build,
                'summary': summary.result,
                'raw_data': sorted(raw_data,
                                   key=lambda x: (x.case_name, x.loop)),
                'attachment_url': '/'.join(
                    ['attachment', project, current_build])
            }
            return new_render(request, 'daily.html', context, title=tag,
                              subtitle=project)
        else:
            return null_content(request)
    else:
        return null_content(request)
def view(request, project, build, test_id, case_name, attachment_name):
    """Render the per-instance usage chart page fed by the chart_data API."""
    api_parts = ['chart_data', 'usage_chart_by_instances', project, build,
                 test_id, case_name, attachment_name]
    return new_render(request, 'usage_by_instances.html',
                      context={'api': '/'.join(api_parts)},
                      title='CP0', subtitle=None, wide=True)
def upload_file(request):
    """Render the upload page listing all available uploads.

    Fix: the local ``all`` shadowed the builtin; renamed to
    ``uploads_qs`` (the template context key is still "all").
    """
    cur_user = request.user
    uploads_qs = uploads.objects.filter(available=True).order_by("-id")
    for item in uploads_qs:
        # Attach a UTC+8 display timestamp for the template.
        item.fmattime = (
            item.create_time +
            datetime.timedelta(hours=8)).strftime('%Y-%m-%d %H:%M:%S')
    return new_render(request, 'upload.html', {
        "all": uploads_qs,
        "user": cur_user
    }, title='CP0', subtitle='UPLOAD_FILE')
def profile_view(request):
    """Render the profile page: the user's private slaves, one edit
    formset per slave, a blank creation form, and per-slave run status.

    Fixes: ``True if cond else False`` replaced with the boolean
    expression itself; the formset comprehension no longer reuses the
    outer loop's ``slave`` name.
    """
    private_slave = Slave.objects.filter(policy='1', owner=request.user.id)
    SlaveFormSet = modelformset_factory(Slave, form=SlaveForm, max_num=1)
    private_slave_stat = []
    for slave in private_slave:
        stat = slave.status()
        # A slave is "running" when it reports a live atf test id.
        is_running = "atf_test_id" in stat and stat["atf_test_id"] != "None"
        private_slave_stat.append({"slave_id": slave.id,
                                   "is_running": is_running})
    context = {
        'slaves': private_slave,
        'formsets': [SlaveFormSet(queryset=Slave.objects.filter(id=s.id))
                     for s in private_slave],
        'newslavefrom': modelform_factory(Slave, form=SlaveForm)(),
        'private_slave_stat': private_slave_stat
    }
    return new_render(request, 'profile.html', context=context,
                      subtitle='Profile')
def create_project(request):
    """GET: render the create-project form. POST: stub that redirects home."""
    if request.method == 'GET':
        # "Last, First" display names for users with a last name set.
        user_names = ['%s, %s' % (u.last_name, u.first_name)
                      for u in User.objects.exclude(last_name__exact='')]
        slave_list = [s.serialize for s in Slave.objects.all()]
        return new_render(request, 'workspace.createproject.html',
                          context={'users': user_names,
                                   'slaves': slave_list},
                          title='CP0', subtitle='Create project')
    elif request.method == 'POST':
        print('save project')
        return HttpResponseRedirect('/')
def index(request, project_name, build):
    """Render the bisect page: merge-request images that landed between
    the previous daily build and the current one, filtered to the
    gordon_peak_acrn target.
    """
    cases = request.POST.getlist('cb_case[]')
    # Builds are named '<date>_<number>'; keep the number part.
    origin_build = build.split('_')[1] if '_' in build else build
    builds = api.build_list(project_name, 'daily')
    # NOTE(review): raises IndexError when `build` is the last entry of
    # the list (no previous build exists) -- confirm callers guarantee
    # otherwise.
    prev_build = builds[builds.index(build) + 1]
    prev_origin_build = prev_build.split(
        '_')[1] if '_' in prev_build else prev_build
    project = Project.objects.get(name=project_name)
    merge_req = get_mergerequest(project_name, origin_build)
    prev_merge_req_num = get_mergerequest(project_name,
                                          prev_origin_build).split('/')[1]
    branch_name, merge_req_num = merge_req.split('/')
    rds = Redis(host='localhost', port=6379)
    filter_target = 'gordon_peak_acrn'
    filtered_mq_list = []
    for site in MERGE_REQUEST_SITES:
        logger.debug('searching %s', site)
        logger.debug('mq_images_{}_{}'.format(site, project.builder_name))
        for mq in rds.lrange(
                'mq_images_{}_{}'.format(site, project.builder_name), 0, -1):
            # mq entries look like '<x>/<build>/<target>/...'.
            # NOTE(review): this rebinds the `build` parameter -- after
            # this loop it is no longer the original argument.
            _, build, target = mq.split('/')[:3]
            # Keep MQ builds strictly between the previous and current
            # merge-request numbers for the target, de-duplicated.
            if int(build) < int(merge_req_num) and int(build) > int(
                    prev_merge_req_num) and target == filter_target:
                if (build, site, MERGE_REQUEST_REPO.format(
                        site, build, target)) not in filtered_mq_list:
                    filtered_mq_list.append(
                        (build, site,
                         MERGE_REQUEST_REPO.format(site, build, target)))
    context = {
        'project_name': project.name,
        'project_id': project.id,
        'current_build': origin_build,
        'current_mq': merge_req_num,
        'prev_build': prev_origin_build,
        'prev_mq': prev_merge_req_num,
        'count': len(filtered_mq_list),
        'cases': cases,
        'slaves': Slave.objects.all(),
        'filtered_mq_list': filtered_mq_list,
        'builder_name': project.builder_name,
    }
    return new_render(request, 'bisects.html', context=context)
def view_project(request, project_name):
    """Render the project overview page.

    Fix: when no project matched, the original fell off the end and
    returned None (a Django server error); it now renders the 500 page
    explicitly, consistent with the other views in this file.
    """
    rds = Redis(host='localhost')
    projects = Project.objects.filter(name=project_name)
    if len(projects):
        proj = projects[0]
        context = {
            'project': proj,
            'products': ', '.join(
                rds.lrange('products_%s' % proj.builder_name, 0, -1)),
            'db_info': api.db_info(project_name)
        }
        return new_render(request, 'project.html', context=context,
                          title=project_name, subtitle='Overview')
    return render_to_response('500.html')
def production_index(request, production_name):
    """Render the project index template for one production line.

    Fix: removed a stray debug ``print`` of the queryset.
    """
    # Per-production template names; an unknown production_name raises
    # KeyError (surfaces as a 500).
    templates = {
        'gp21': 'project_index_2_1.html',
        'gp2': 'project_index.html',
        'kbl': 'project_index_kbl.html',
        'aic': 'project_index_aic.html'
    }
    filtered_projects = Project.objects.filter(
        production__short_name=production_name)
    context = {
        'views': VIEWS,
        'tag': 'daily',
        'filtered_projects': filtered_projects
    }
    return new_render(request, templates[production_name], context=context)
def index_page(request, project):
    """Render the per-project patch tracking page.

    Lists un-merged/un-abandoned patches (plus clipboard text), all
    patches, and the project's build list when available.

    Fixes: ``build_count`` was referenced in the context but only
    assigned when ``buildsPath`` exists (NameError otherwise) -- it now
    defaults to 0; the builds file is read via ``with`` so the handle
    is closed; two no-op ``global`` declarations were dropped.
    """
    build_list = ''
    build_count = 0
    index_html = 'index_{0}.html'.format(project)
    available_filter = patch.objects.filter(project=project).filter(
        available=True).order_by('-id').exclude(status='MERGED').exclude(
            status='ABANDONED')
    usrnames = str(request.user)
    counts = len(available_filter)
    clipboard = []
    if os.path.exists(buildsPath):
        with open(buildsPath, 'r') as fh:
            build_list = json.load(fh)
        build_count = len([b for b in build_list
                           if b['project'] == project])
    for i in available_filter:
        i.data = i.track.split('/')[-1]
        # Display timestamps shifted to UTC+8.
        i.fomartted_date = (
            i.patchtime +
            datetime.timedelta(hours=8)).strftime('%Y-%m-%d %H:%M:%S')
        clipboard.append(i.fullpatch())
    all_filter = patch.objects.filter(project=project).order_by('-id')
    all_counts = str(len(all_filter))
    for l in all_filter:
        l.fomartted_date = (
            l.patchtime +
            datetime.timedelta(hours=8)).strftime('%Y-%m-%d %H:%M:%S')
        l.data = l.track.split('/')[-1]
    return new_render(request, index_html, {
        'str': available_filter,
        'clipboard': '\r'.join(clipboard),
        'counts': counts,
        'All': patch.objects.filter(project=project),
        'users': usrnames,
        'ALL': all_filter,
        'all_counts': all_counts,
        "project": project,
        'Builds': build_list,
        'build_count': build_count
    }, title='CP0', subtitle=(project.upper()))
def index(request):
    """Render the workspace landing page: projects, slaves, case lists."""
    # Superusers see unowned slaves; everyone else sees their own.
    owner = None if request.user.is_superuser else request.user
    slaves = Slave.objects.filter(owner=owner)
    yl_owner = 'Liang, YuxiangX'
    public_case_names = [
        'boot_analyze_2_0.py', 'Acrn_BTM.py', 'boottime.py',
        'bootanalyze.py', 'boot_analyze_2_0.py', 'adb_rlbench.py',
        'adb_gfx_gl4.py'
    ]
    context = {
        'joined_projects': [p.serialize for p in Project.objects.all()],
        'slaves': [s.serialize for s in slaves[:5]],
        # Hard-coded showcase lists for the workspace page.
        'public_cases': [{'name': n, 'owner': yl_owner}
                         for n in public_case_names],
        'private_cases': [
            {'name': 'cp0_master_smoke_test.py', 'owner': 'Chen, ChenX'},
            {'name': 'cp0_slave_smoke_test.py', 'owner': yl_owner},
        ],
    }
    return new_render(request, 'workspace.html', context=context,
                      title='CP0', subtitle='My Workspace')
def index(request):
    """Render the tasks overview: today's log, live testings, the queue.

    Fix: the local result list was named ``logger``, shadowing the
    module-level logger used elsewhere in this file; renamed to
    ``log_entries``.  ``map`` replaced with a comprehension.
    """
    today_min = datetime.datetime.combine(datetime.date.today(),
                                          datetime.time.min)
    today_max = datetime.datetime.combine(datetime.date.today(),
                                          datetime.time.max)
    log_entries = [
        logger_format(entry) for entry in Logger.objects.filter(
            create_time__range=(today_min, today_max))
    ]
    slaves = Slave.objects.all()
    testing_tasks = []
    tasks_estimate = []  # placeholder: the template expects this key
    slave_estimate = []  # placeholder: the template expects this key
    for slave in slaves:
        testing = Testing(slave)
        if testing.alive:
            testing_tasks.append(testing)
    available_tasks = Task.objects.filter(available=True)
    # Manually-triggered tasks that have not produced a result link yet.
    manual_tasks = [
        l.task for l in Logger.objects.filter(description='mt')
        if l.result_link == '#'
    ]
    context = {
        'subtitle': 'Overview',
        'loggers': log_entries[::-1],  # newest first
        'testing': testing_tasks,
        'queue': list(available_tasks) + manual_tasks,
        'warning': rds.get('tasks'),
        'current_time': datetime.datetime.now(tz=timezone.utc),
        'tasks_estimate': tasks_estimate,
        "slave_estimate": slave_estimate,
    }
    return new_render(request, 'tasks.html', context, title='CP0',
                      subtitle='Tasks')
def getmytask(request):
    """AJAX view: render one page of the user's task log plus pager HTML.

    Fix: the page-number parse caught a bare ``Exception`` (with an
    unused binding); it now catches exactly ``TypeError`` (missing
    ``?page=``) and ``ValueError`` (non-numeric).
    """
    try:
        p = int(request.GET.get('page'))
    except (TypeError, ValueError):
        p = 1  # missing or malformed ?page= -> first page
    mytasks = Logger.objects.filter(~Q(description='mt'),
                                    user=request.user.id)[::-1]
    mytasks_paginator = CustomPaginator(10, mytasks, 7)
    try:
        posts = mytasks_paginator.page(p)
    except PageNotAnInteger:
        posts = mytasks_paginator.page(1)
    except EmptyPage:
        posts = mytasks_paginator.page(mytasks_paginator.num_pages)
    page_tabs = mytasks_paginator.get_page_tabs()
    # Center the visible pager window on the current page, clamping at
    # both ends of the page range.
    start_page_tab = p - page_tabs // 2
    end_page_tab = p + page_tabs // 2
    middle_page_tab = int(page_tabs // 2)
    if p <= middle_page_tab:
        start_page_tab = 1
        end_page_tab = page_tabs
    if p > mytasks_paginator.num_pages - middle_page_tab:
        start_page_tab = mytasks_paginator.num_pages - page_tabs + 1
        end_page_tab = mytasks_paginator.num_pages
    page_html = ""
    # +3 gives slack past the window end; the guards below enforce the
    # real 1..num_pages bounds.
    for i in range(start_page_tab, end_page_tab + 3):
        if i < 1:
            continue
        if i > mytasks_paginator.num_pages:
            break
        per_page_html = '<button id="p_{0}" class="btn btn-white" onclick="getMytasks({0})">{0}</button>'.format(
            i)
        page_html += per_page_html
    return new_render(request, 'mytask_table.html', {
        "mytasks": posts,
        "page_html": page_html
    })
def pca_data(request):
    """POST: run PCA over the selected cases and render the result page.

    Fix: non-POST requests previously fell off the end and returned
    None (a Django server error); they now render the 500 page.
    """
    if request.method == 'POST':
        caselist = byteify(request.POST.getlist('case'))
        parseconf = byteify(request.POST.getlist('parseconf'))
        data = OrderedDict()
        pca = pca_calc(caselist, parseconf[0])
        data['correlation_matrix'] = pca.correlation_matrix
        data['count_mat'] = pca.count_mat
        data['feature_vector'] = pca.feature_vector
        data['feature_val'] = pca.feature_val
        # Weight of each key under the first principal component.
        data['weight'] = get_weight(
            pca.feature_vector, pca.feature_val[0],
            get_default_valIndex_list(len(pca.keys)))
        data['collect_effict_data'] = pca.collect_effict_data
        data['keys'] = pca.keys
        data['len'] = len(pca.keys)
        data['coeffient'] = pca.coef_list
        data['image_score'] = pca.image_score_show
        data['image_version'] = pca.image_version_show
        return new_render(request, 'pca_data.html', data, title='CP0',
                          subtitle='PCA')
    return render_to_response('500.html')
def comparision(request):
    """Render the boot-report comparison page for the session's 'cmp' list.

    Each entry is 'project/build/test_id/case/loop'; the first entry is
    the baseline, up to three more are compared against it.

    Fixes: removed two py2 debug ``print`` statements and an unused
    ``enumerate`` index.
    """
    cmp_list = request.session.get("cmp", None)
    # NOTE(review): the case name is hard-coded to 'full boot'; the case
    # segment parsed from the links is ignored -- confirm intended.
    case_name = 'full boot'
    if cmp_list:
        current = cmp_list[0]
        project, build, test_id, case, loop = current.split('/')
        attachment_name = 'report_{0}.csv'.format(loop)
    else:
        return HttpResponseRedirect('/')
    curr_data = ParseReportCSV(project, build, test_id, case_name,
                               attachment_name)
    curr_raw = curr_data.parse()
    cmp_data = []
    for test in cmp_list[1:]:
        new_project, new_build, new_test_id, new_case, new_loop = \
            test.split('/')
        new_attachment_name = 'report_{0}.csv'.format(new_loop)
        new = ParseReportCSV(new_project, new_build, new_test_id, case_name,
                             new_attachment_name)
        if new.parse() is False:
            # A malformed report aborts the whole comparison.
            return HttpResponseRedirect('/')
        cmp_data.append(new)
        curr_data.add_cmp_data(new)
    # Pad to exactly 3 comparison slots for the template.
    for _ in range(3 - len(cmp_data)):
        cmp_data.append(None)
    context = {
        'curr_raw': curr_raw,
        'curr_data': curr_data,
        'cmp_data': cmp_data,
        'projects': Project.objects.all()
    }
    return new_render(request, 'comparision.html', context=context,
                      title='CP0', subtitle=None)
def getmyfavorites(request):
    """AJAX view: render one page of the user's favorited task log plus
    pager HTML (mirror of ``getmytask``).

    Fix: the page-number parse caught a bare ``Exception`` (with an
    unused binding); it now catches exactly ``TypeError`` and
    ``ValueError``.
    """
    try:
        p = int(request.GET.get('page'))
    except (TypeError, ValueError):
        p = 1  # missing or malformed ?page= -> first page
    favoritesTasks = Task.objects.filter(favorites=request.user.id)
    myfavorites = Logger.objects.filter(task__in=favoritesTasks)[::-1]
    myfavo_paginator = CustomPaginator(10, myfavorites, 7)
    try:
        myfavo_posts = myfavo_paginator.page(p)
    except PageNotAnInteger:
        myfavo_posts = myfavo_paginator.page(1)
    except EmptyPage:
        myfavo_posts = myfavo_paginator.page(myfavo_paginator.num_pages)
    page_tabs = myfavo_paginator.get_page_tabs()
    # Center the visible pager window on the current page, clamping at
    # both ends of the page range.
    start_page_tab = p - page_tabs // 2
    end_page_tab = p + page_tabs // 2
    middle_page_tab = int(page_tabs // 2)
    if p <= middle_page_tab:
        start_page_tab = 1
        end_page_tab = page_tabs
    if p > myfavo_paginator.num_pages - middle_page_tab:
        start_page_tab = myfavo_paginator.num_pages - page_tabs + 1
        end_page_tab = myfavo_paginator.num_pages
    page_html = ""
    # +3 gives slack past the window end; the guards below enforce the
    # real 1..num_pages bounds.
    for i in range(start_page_tab, end_page_tab + 3):
        if i < 1:
            continue
        if i > myfavo_paginator.num_pages:
            break
        per_page_html = '<button id="pf_{0}" class="btn btn-white" onclick="getMyfavorites({0})">{0}</button>'.format(
            i)
        page_html += per_page_html
    return new_render(request, 'myfavorite_table.html', {
        "myfavorites": myfavo_posts,
        "page_html": page_html
    })
def pca_val_data(request):
    """AJAX POST: recompute PCA weights from client-posted feature
    vectors/values and render the weight table.

    NOTE(review): non-AJAX or non-POST requests fall off the end and
    return None (a Django server error) -- confirm callers only hit
    this via the page's AJAX call.
    """
    if request.is_ajax() and request.method == 'POST':
        case_list = byteify(request.POST.getlist('case'))
        fea_vactor = byteify(request.POST.getlist('vactor'))
        # The page posts the vector as HTML-formatted text; convert it
        # back to a python list-of-lists.
        fea_vactor = html_list_to_pylist(fea_vactor)
        fea_value = byteify(request.POST.getlist('values'))
        # NOTE(review): .strip("'") already removes quotes from both
        # ends, so the following .lstrip("'") is redundant.
        fea_value = fea_value[0].strip("'").lstrip("'").split(',')
        coef = byteify(request.POST.getlist('coefs'))
        keys = byteify(request.POST.getlist('keys'))[0]
        # Undo the repr()-style list formatting posted by the page,
        # e.g. "['a', 'b']" -> ['a', 'b'].
        keys = keys.replace("['", "").replace("']", "").split("', '")
        fea_vactor = np.mat(fea_vactor)
        data = OrderedDict()
        data['casename'] = case_list
        data['feature_vector'] = fea_vactor
        data['weight'] = get_weight(fea_vactor, fea_value, case_list)
        data['keys'] = keys
        data['coef'] = coef
        return new_render(request, 'pca_weight.html', data, title='CP0',
                          subtitle='PCA')
def pca_data_chart(request):
    """Render the raw PCA data chart page; any failure renders the 500 page.

    Fix: ``except Exception, e`` is Python-2-only syntax; replaced with
    ``except Exception as e`` (valid on 2.6+ and 3), and the bare
    ``print e`` with ``print(e)``.  ``unit.keys()`` is now computed once.
    """
    try:
        data = OrderedDict()
        raw_data, unit = get_pca_raw_data_from_json(force_rewrite=True)
        keys = unit.keys()
        data['keys'] = keys
        data['raw'] = dict()
        delete_ebList = get_eb_name_from_mongodb("pca_raw_delete")
        pca_eb = get_eb_name_from_mongodb("pca_raw")
        pca_eb.sort()
        data["delete_list"] = delete_ebList
        data["pca_list"] = pca_eb
        for key in keys:
            # Pull this key's series out of every raw record, dropping
            # entries whose value is missing/None.
            func = partial(parse_raw_data, key)
            data["raw"][key] = filter(lambda x: not str(x[1]) == "None",
                                      map(func, raw_data))
        return new_render(request, 'pca_data_chart.html', data, title='CP0',
                          subtitle='PCA', wide=True)
    except Exception as e:
        # Broad catch is deliberate: this page must degrade to a 500
        # page rather than crash on bad backing data.
        print(e)
        return render_to_response('500.html')
def index(request):
    """Render the statistics landing page (no dynamic data yet)."""
    return new_render(request, 'statistics.html', context={},
                      subtitle='Statistics')
def test_template_index(request):
    """Render the user's config templates chunked into rows of three.

    Fix: the chunking step was 2 while the slice width was 3, so
    consecutive rows overlapped and templates were rendered twice; the
    step is now 3, matching the standard chunking idiom
    ``[xs[i:i+n] for i in range(0, len(xs), n)]``.
    """
    templates = ConfigTemplate.objects.filter(user=request.user)
    splited = [templates[i:i + 3] for i in range(0, len(templates), 3)]
    return new_render(request, 'test_template_index.html',
                      context={'templates': splited})
def null_content(request):
    """Render the shared empty-state page."""
    empty_context = {}
    return new_render(request, 'empty.html', empty_context)