Code Example #1
def upload_test_result_file(request):
    if request.method == 'GET':
        projects = Project.objects.values()
        return render(request, 'upload/test_result_file.html',
                      {'projects': projects})
    csv_file = request.FILES["csv_file"]
    csv_file_fields = request.POST.get('csv_file_fields', '1')
    test_name = request.POST.get('test_name', '1')
    project_id = int(request.POST.get('project_id', '0'))
    # Create new project
    if project_id == 0:
        project = Project(project_name="New project")
        project.save()
        project_id = project.id
    test = Test(project_id=project_id,
                display_name=test_name,
                show=True,
                start_time=int(time.time() * 1000))
    test.save()
    test_id = test.id
    path = default_storage.save('test_result_data.csv',
                                ContentFile(csv_file.read()))
    csv_file_fields = csv_file_fields.split(',')
    generate_test_results_data(test_id, project_id, path, csv_file_fields)

    return render(request, "upload/success.html", {
        'result': 'ok',
        'test_name': test_name,
        'project_id': project_id,
    })
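
The view above accepts a multipart POST with a csv_file upload plus csv_file_fields, test_name, and project_id form fields. A minimal sketch of exercising it with Django's test client, assuming a hypothetical /upload/test_result_file/ URL route and a local CSV file:

from django.test import Client

client = Client()
with open('local_results.csv', 'rb') as f:       # any local JMeter CSV export
    response = client.post('/upload/test_result_file/', {
        'csv_file': f,                           # picked up via request.FILES
        'csv_file_fields': 'response_time,url,timestamp',
        'test_name': 'nightly run',
        'project_id': '0',                       # 0 triggers creation of a new project
    })
assert response.status_code == 200
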
Code Example #2
def add_running_test(root):
    # Parse data from Jenkins Job folder
    build_xml = ElementTree()
    build_parameters = []
    display_name = "unknown"
    start_time = 0
    duration = 0
    project_id = 0
    jmeter_results_path = os.path.join(
        root, "jmeter.jtl")
    monitoring_data = os.path.join(
        root, "monitoring.data")
    build_xml_path = os.path.join(
        root, "build.xml")

    if os.path.isfile(build_xml_path):
        build_xml.parse(build_xml_path)
        build_tag = build_xml.getroot()

        for params in build_tag:
            if params.tag == 'actions':
                parameters = params.find('.//parameters')
                for parameter in parameters:
                    name = parameter.find('name')
                    value = parameter.find('value')
                    build_parameters.append(
                        [name.text, value.text])
            elif params.tag == 'startTime':
                start_time = int(params.text)
            elif params.tag == 'duration':
                duration = int(params.text)
            elif params.tag == 'displayName':
                display_name = params.text
    project_name = re.search('/([^/]+)/builds', root).group(1)
    if not Project.objects.filter(project_name=project_name).exists():
        print("Adding new project: " + project_name)
        project = Project(
            project_name=project_name,
            show=True
        )
        project.save()
        project_id = project.id
    else:
        # Re-use the existing project so project_id does not stay at 0
        project_id = Project.objects.get(project_name=project_name).id
    print("Project_id: " + str(project_id))
    build_number = int(
        re.search(r'/builds/(\d+)', root).group(1))
    running_test = TestRunning(
        project_id=project_id,
        build_number=build_number,
        result_file_dest=jmeter_results_path,
        monitoring_file_dest=monitoring_data,
        log_file_dest='',
        display_name=display_name,
        start_time=start_time,
        pid=0,
        jmeter_remote_instances=None,
        workspace=root,
        is_running=True,
    )
    running_test.save()
    return running_test.id
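
add_running_test expects root to be a Jenkins build folder of the form .../<job>/builds/<number>, since both the project name and the build number are pulled out of that path with regular expressions. A minimal sketch of scanning a Jenkins home for such folders, assuming a hypothetical /var/lib/jenkins install path:

import glob
import os

jenkins_home = '/var/lib/jenkins'  # hypothetical Jenkins home
pattern = os.path.join(jenkins_home, 'jobs', '*', 'builds', '*')
for build_dir in glob.glob(pattern):
    # Only numbered build folders that already contain a build.xml
    if (os.path.basename(build_dir).isdigit()
            and os.path.isfile(os.path.join(build_dir, 'build.xml'))):
        running_test_id = add_running_test(build_dir)
        print("Registered running test: " + str(running_test_id))
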
Code Example #3
def prepare_test(project_name,
                 workspace,
                 jmeter_dir,
                 threads_num,
                 duration=0,
                 rampup=0,
                 testplan_file='',
                 jenkins_env={}):
    response = {}
    if not Project.objects.filter(project_name=project_name).exists():
        logger.info('Creating a new project: {}.'.format(project_name))
        p = Project(project_name=project_name)
        p.save()
        project_id = p.id
    else:
        p = Project.objects.get(project_name=project_name)
        project_id = p.id
    start_time = int(time.time() * 1000)
    t = TestRunning(
        project_id=project_id,
        start_time=start_time,
        duration=duration,
        workspace=workspace,
        rampup=rampup,
        is_running=False,
        testplan_file_dest=os.path.join(workspace, testplan_file),
        result_file_dest=os.path.join(workspace, 'result.jtl'),
    )
    # Insert CSV writer listener to test plan
    new_testplan_file = prepare_test_plan(t.workspace,
                                          t.testplan_file_dest,
                                          t.result_file_dest,
                                          )
    if new_testplan_file:
        logger.info('New testplan {}.'.format(new_testplan_file))
        t.testplan_file_dest = new_testplan_file
    if jenkins_env:
        logger.info('Setting test build path.')
        t.build_path = os.path.join(
            jenkins_env['JENKINS_HOME'],
            'jobs',
            jenkins_env['JOB_NAME'],
            jenkins_env['BUILD_NUMBER'],
        )
        t.build_number = jenkins_env['BUILD_NUMBER']
        t.display_name = jenkins_env['BUILD_DISPLAY_NAME']
    t.save()
    response = {
        'testplan': t.testplan_file_dest,
    }
    return response
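
A minimal sketch of calling prepare_test from a Jenkins build step, assuming the standard Jenkins environment variables are present in os.environ (the project name, workspace, and JMeter paths below are illustrative):

import os

jenkins_env = {key: os.environ[key]
               for key in ('JENKINS_HOME', 'JOB_NAME',
                           'BUILD_NUMBER', 'BUILD_DISPLAY_NAME')
               if key in os.environ}
result = prepare_test(
    project_name='demo-project',          # illustrative project name
    workspace='/tmp/loadtest-workspace',  # illustrative workspace path
    jmeter_dir='/opt/jmeter',             # illustrative JMeter install dir
    threads_num=100,
    duration=600,
    rampup=60,
    testplan_file='testplan.jmx',
    jenkins_env=jenkins_env,
)
print(result['testplan'])  # path of the (possibly rewritten) test plan
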
Code Example #4
def create_project_page(request):
    new_project = Project()
    new_project.save()
    return render(request, 'create_project_page.html', {
        'project': new_project,
    })
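
This view takes no form data, so wiring it up is just a matter of routing a URL to it. A minimal sketch of a urls.py entry, assuming Django 2.0+ for path() and an illustrative module path for the view:

from django.urls import path

from dashboard import views  # illustrative: wherever create_project_page lives

urlpatterns = [
    path('project/create/', views.create_project_page,
         name='create_project_page'),
]
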
Code Example #5
def prepare_load_generators(project_name,
                            workspace,
                            jmeter_dir,
                            threads_num,
                            duration,
                            rampup=0,
                            mb_per_thread=0,
                            additional_args='',
                            testplan_file='',
                            jenkins_env={}):
    response = {}
    if not Project.objects.filter(project_name=project_name).exists():
        logger.info('Creating a new project: {}.'.format(project_name))
        p = Project(project_name=project_name)
        p.save()
        project_id = p.id
    else:
        p = Project.objects.get(project_name=project_name)
        project_id = p.id
    start_time = int(time.time() * 1000)
    logger.info('Adding running test instance to DB.')
    t = TestRunning(
        project_id=project_id,
        start_time=start_time,
        duration=duration,
        workspace=workspace,
        rampup=rampup,
        is_running=False,
        testplan_file_dest=os.path.join(workspace, testplan_file),
        result_file_dest=os.path.join(workspace, 'result.jtl'),
    )
    # Insert CSV writer listener to test plan
    new_testplan_file = prepare_test_plan(t.workspace,
                                          t.testplan_file_dest,
                                          t.result_file_dest,
                                          )
    if new_testplan_file:
        logger.info('New testplan {}.'.format(new_testplan_file))
        if project_name != 'TropicalIsland':
            t.testplan_file_dest = new_testplan_file
    if jenkins_env:
        logger.info('Setting test build path.')
        t.build_path = os.path.join(
            jenkins_env['JENKINS_HOME'],
            'jobs',
            jenkins_env['JOB_NAME'],
            jenkins_env['BUILD_NUMBER'],
        )
        t.build_number = jenkins_env['BUILD_NUMBER']
        t.display_name = jenkins_env['BUILD_DISPLAY_NAME']
    t.save()
    test_running_id = t.id
    # get estimated required memory for one thread
    if mb_per_thread == 0:
        mb_per_thread = get_avg_thread_malloc_for_project(
            project_id, threads_num)
    logger.info(
        "Threads_num: {}; mb_per_thread: {}; project_name: {}; jmeter_dir: {}; duration: {}".
        format(threads_num, mb_per_thread, project_name, jmeter_dir, duration))
    matched_load_generators = []
    ready = False

    rmt = 0.0  # recommended_max_threads_per_one_jmeter_instance
    mem_multiplier = 2.0
    if mb_per_thread < 1.5:
        rmt = 500.0
        mem_multiplier = 2
    elif mb_per_thread >= 1.5 and mb_per_thread < 2:
        rmt = 400.0
    elif mb_per_thread >= 2 and mb_per_thread < 5:
        rmt = 300.0
    elif mb_per_thread >= 5 and mb_per_thread < 10:
        rmt = 200.0
    elif mb_per_thread >= 10:
        rmt = 100.0

    logger.info("Threads per jmeter instance: {};".format(rmt))
    logger.debug("ceil1: {};".format(float(threads_num) / rmt))
    logger.debug("ceil2: {};".format(math.ceil(float(threads_num) / rmt)))
    target_amount_jri = int(math.ceil(float(threads_num) / rmt))
    required_memory_for_jri = int(math.ceil(
        mb_per_thread * rmt * mem_multiplier))  # why 2 ? dunno
    required_memory_total = math.ceil(
        target_amount_jri * required_memory_for_jri * 1.2)
    logger.info('HEAP Xmx: {}'.format(required_memory_for_jri))
    logger.info("target_amount_jri: {}; required_memory_total: {}".format(
        target_amount_jri, required_memory_total))
    java_args = "-server -Xms{}m -Xmx{}m -Xss228k -XX:+DisableExplicitGC -XX:+CMSClassUnloadingEnabled -XX:+UseCMSInitiatingOccupancyOnly -XX:CMSInitiatingOccupancyFraction=70 -XX:+ScavengeBeforeFullGC -XX:+CMSScavengeBeforeRemark -XX:+UseConcMarkSweepGC -XX:+CMSParallelRemarkEnabled -Djava.net.preferIPv6Addresses=true -Djava.net.preferIPv4Stack=false".format(
        required_memory_for_jri, required_memory_for_jri)

    # java_args = "-server -Xms{}m -Xmx{}m  -Xss228k -XX:+UseConcMarkSweepGC -XX:+CMSParallelRemarkEnabled -XX:+DisableExplicitGC -XX:+CMSClassUnloadingEnabled -XX:+AggressiveOpts -Djava.net.preferIPv6Addresses=true -Djava.net.preferIPv4Stack=false".format(
    #    required_memory_for_jri, required_memory_for_jri)
    # update_load_generators_info()
    # java_args = "-server -Xms{}m -Xmx{}m -XX:+UseG1GC -XX:MaxGCPauseMillis=100 -XX:G1ReservePercent=20 -Djava.net.preferIPv6Addresses=true".format(
    #    required_memory_for_jri, required_memory_for_jri)
    load_generators_info = list(
        LoadGenerator.objects.filter(active=True).values())
    load_generators_count = LoadGenerator.objects.filter(active=True).count()

    running_test_jris = []
    overall_hosts_amount_jri = 0

    threads_per_host = int(float(threads_num) / target_amount_jri)

    if not ready:
        logger.info(
            "Did not find a suitable single load generator. "
            "Trying to find a combination.")
        t_hosts = {}
        # Try to find a combination of load generators:
        for generator in load_generators_info:
            hostname = generator['hostname']
            num_cpu = float(generator['num_cpu'])
            memory_free = float(generator['memory_free'])
            memory = float(generator['memory'])
            la_1 = float(generator['la_1'])
            la_5 = float(generator['la_5'])
            la_15 = float(generator['la_15'])
            status = generator['status']
            # Estimated number of JMeter instances this host can fit
            t_hosts[hostname] = math.ceil(memory_free /
                                          (required_memory_for_jri * 1.2))
        t_sorted_hosts = sorted(t_hosts, key=t_hosts.get, reverse=True)

        # Try to spread them equally on load generators
        estimated_jris_for_host = int(
            math.ceil(float(target_amount_jri) / float(load_generators_count)))
        logger.debug(
            "estimated_jris_for_host: {};".format(estimated_jris_for_host))
        for h in t_sorted_hosts:
            possible_jris_on_host = t_hosts[h]
            if possible_jris_on_host > estimated_jris_for_host:
                possible_jris_on_host = estimated_jris_for_host
            if overall_hosts_amount_jri + possible_jris_on_host > target_amount_jri:
                possible_jris_on_host = target_amount_jri - overall_hosts_amount_jri
            logger.debug("h: {}; possible_jris_on_host: {};".format(
                h, possible_jris_on_host))
            matched_load_generators.append({
                'hostname': h,
                'possible_jris_on_host': int(possible_jris_on_host),
            })
            overall_hosts_amount_jri += possible_jris_on_host
            logger.debug("overall_hosts_amount_jri: {};".format(
                overall_hosts_amount_jri))
            if overall_hosts_amount_jri >= target_amount_jri:
                ready = True
                break
    if not ready and overall_hosts_amount_jri < target_amount_jri:
        logger.debug(
            "################CREATING NEW LOAD GENERATOR###################")
        n = target_amount_jri - overall_hosts_amount_jri
        required_memory_for_new_generators = n * required_memory_for_jri * 1.3
        logger.debug("required_memory_for_new_generators: {};".format(
            required_memory_for_new_generators))
        num_of_new_load_generators = int(math.ceil(
            float(required_memory_for_new_generators) / 4096))
        logger.debug("num_of_new_load_generators: {};".format(
            num_of_new_load_generators))
        # Create new load generator server if current are not enough
        # for i in xrange(num_of_new_load_generators):
        # for i in xrange(1):
        #    env.host_string = 'generator3.loadtest'
        #    env.game = 'loadtest'
        #    env.task = 'loadgenerator_create'
        #    env.logger = logger
        #    env.ig_execute = ig_execute
        #    loadgenerator_create(4, 4096)
        # prepare_load_generators(project_name, workspace, jmeter_dir,
        #                        threads_num, duration, rampup, mb_per_thread)
    logger.debug(
        "matched_load_generators: {};".format(str(matched_load_generators)))
    data_pool_index = 0
    start_jri_threads = []
    if ready:
        for load_generator in matched_load_generators:
            thread = threading.Thread(
                target=start_jris_on_load_generator,
                args=(
                    load_generator,
                    threads_per_host,
                    test_running_id,
                    project_id,
                    jmeter_dir,
                    java_args,
                    data_pool_index,
                    running_test_jris,
                    additional_args))
            # Increment data pool index by number of jris on started thread
            data_pool_index += load_generator['possible_jris_on_host']
            thread.start()
            start_jri_threads.append(thread)
        for thread in start_jri_threads:
            thread.join()

        t.jmeter_remote_instances = running_test_jris
        t.is_running = True
        t.save()
        final_str = ""
        jmeter_instances = JmeterInstance.objects.annotate(
            hostname=F('load_generator__hostname')).filter(
                test_running_id=test_running_id).values('hostname', 'port')
        for jmeter_instance in jmeter_instances:
            hostname = jmeter_instance['hostname']
            port = jmeter_instance['port']
            final_str += "{}:{},".format(hostname, port)
        final_str = final_str[:-1]
        response = {
            'testplan': t.testplan_file_dest,
            "remote_hosts_string": final_str,
            "threads_per_host": threads_per_host
        }
    return response
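
The sizing arithmetic in the middle of this function is easier to follow with concrete numbers. A small worked example under assumed inputs (1000 threads at roughly 2.5 MB per thread), mirroring the formulas above:

import math

threads_num = 1000   # assumed total number of JMeter threads
mb_per_thread = 2.5  # assumed memory per thread -> falls in the 2..5 MB bucket
rmt = 300.0          # recommended max threads per JMeter instance for that bucket
mem_multiplier = 2.0

target_amount_jri = int(math.ceil(threads_num / rmt))                # 4 instances
required_memory_for_jri = int(math.ceil(
    mb_per_thread * rmt * mem_multiplier))                           # 1500 MB heap each
required_memory_total = math.ceil(
    target_amount_jri * required_memory_for_jri * 1.2)               # 7200 MB overall
threads_per_host = int(threads_num / target_amount_jri)              # 250 threads per instance

print(target_amount_jri, required_memory_for_jri,
      required_memory_total, threads_per_host)
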
Code Example #6
def test_post_change(self, client: Client, project: Project):
    _, data = self.send_post(client, project)
    project.refresh_from_db()
    assert project.name == data['name']
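
send_post is a helper defined elsewhere in that test suite and not shown here. The sketch below is purely an assumption of what such a helper might do (the endpoint URL and payload are hypothetical), included only to make the assertion above readable:

def send_post(self, client: Client, project: Project):
    # Hypothetical helper: POST a changed name for the project and
    # return the response together with the submitted payload.
    data = {'name': 'renamed project'}
    response = client.post('/projects/{}/change/'.format(project.pk), data)
    return response, data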