Example #1
def test_jenkins_jobs_branch(local_salt_client):
    config = utils.get_configuration()
    expected_version = config['drivetrain_version'] or []
    if not expected_version or expected_version == '':
        pytest.skip("drivetrain_version is not defined. Skipping")
    jenkins_password = get_password(local_salt_client, 'jenkins:client')
    version_mismatch = []
    server = join_to_jenkins(local_salt_client, 'admin', jenkins_password)
    for job_instance in server.get_jobs():
        job_name = job_instance.get('name')
        job_config = server.get_job_config(job_name)
        xml_data = minidom.parseString(job_config)
        BranchSpec = xml_data.getElementsByTagName(
            'hudson.plugins.git.BranchSpec')
        #We use the master branch for pipeline-library in case of 'testing', 'stable', 'nightly' versions
        if expected_version in ['testing', 'nightly', 'stable']:
            expected_version = 'master'
        if BranchSpec:
            actual_version = BranchSpec[0].getElementsByTagName(
                'name')[0].childNodes[0].data
            if (actual_version != expected_version) and (job_name not in [
                    'cvp-func', 'cvp-ha', 'cvp-perf'
            ]):
                version_mismatch.append("Job {0} has {1} branch."
                                        "Expected {2}".format(
                                            job_name, actual_version,
                                            expected_version))
    assert len(version_mismatch) == 0, \
        '''Some DriveTrain jobs have version/branch mismatch:
              {}'''.format(json.dumps(version_mismatch, indent=4))
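The examples on this page call two helpers, get_password and join_to_jenkins, whose definitions are not shown. A minimal sketch of what they might look like, inferred from the inline variant in Example #7 (the pillar keys and the compound targets are assumptions taken from that example, not confirmed definitions):

import jenkins

def get_password(local_salt_client, service):
    # hypothetical helper: read the LDAP admin password from the pillar
    # of the nodes matching the given target (key name is an assumption)
    return local_salt_client.cmd(
        service,
        'pillar.get',
        ['_param:openldap_admin_password'],
        expr_form='pillar').values()[0]

def join_to_jenkins(local_salt_client, username, password):
    # hypothetical helper: build the Jenkins URL from the haproxy bind
    # parameters, the same way Example #7 does inline
    jenkins_port = local_salt_client.cmd(
        'I@jenkins:client and not I@salt:master',
        'pillar.get',
        ['_param:haproxy_jenkins_bind_port'],
        expr_form='compound').values()[0]
    jenkins_address = local_salt_client.cmd(
        'I@jenkins:client and not I@salt:master',
        'pillar.get',
        ['_param:haproxy_jenkins_bind_host'],
        expr_form='compound').values()[0]
    return jenkins.Jenkins('http://{0}:{1}'.format(jenkins_address,
                                                   jenkins_port),
                           username=username, password=password)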
Example #2
def test_drivetrain_components_and_versions(local_salt_client):
    config = utils.get_configuration()
    version = config['drivetrain_version'] or []
    if not version or version == '':
        pytest.skip("drivetrain_version is not defined. Skipping")
    salt_output = local_salt_client.cmd(
        'I@gerrit:client',
        'cmd.run',
        ['docker service ls'],
        expr_form='compound')
    not_found_services = ['gerrit_db', 'gerrit_server', 'jenkins_master',
                          'jenkins_slave01', 'jenkins_slave02',
                          'jenkins_slave03', 'ldap_admin', 'ldap_server']
    version_mismatch = []
    for line in salt_output[salt_output.keys()[0]].split('\n'):
        # iterate over a copy of the list so that removing a found
        # service does not skip the next entry
        for service in not_found_services[:]:
            if service in line:
                not_found_services.remove(service)
                if version != line.split()[4].split(':')[1]:
                    version_mismatch.append("{0}: expected "
                        "version is {1}, actual - {2}".format(service, version,
                                                              line.split()[4].split(':')[1]))
                break
    assert len(not_found_services) == 0, \
        '''Some DriveTrain components are not found:
              {}'''.format(json.dumps(not_found_services, indent=4))
    assert len(version_mismatch) == 0, \
        '''Version mismatch found:
              {}'''.format(json.dumps(version_mismatch, indent=4))
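The version check relies on the column layout of docker service ls output, where the fifth whitespace-separated field is the image in name:tag form. A worked example on a fabricated line (note the parse assumes the registry host carries no :port of its own):

# fabricated line: ID  NAME  MODE  REPLICAS  IMAGE
line = 'abc123 jenkins_master replicated 1/1 registry.example.com/mcp/jenkins:2019.2.0'
image = line.split()[4]      # 'registry.example.com/mcp/jenkins:2019.2.0'
tag = image.split(':')[1]    # '2019.2.0'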
Example #3
def test_drivetrain_jenkins_job(local_salt_client):
    jenkins_password = get_password(local_salt_client,'jenkins:client')
    server = join_to_jenkins(local_salt_client,'admin',jenkins_password)
    #Getting Jenkins test job name from configuration
    config = utils.get_configuration()
    jenkins_test_job = config['jenkins_test_job']
    if not jenkins_test_job or jenkins_test_job == '':
        jenkins_test_job = 'git-mirror-downstream-mk-pipelines'
    if server.get_job_name(jenkins_test_job):
        next_build_num = server.get_job_info(jenkins_test_job)['nextBuildNumber']
        #If this is the first build, skip the running-build check
        if next_build_num != 1:
            #Check that the test job is not running at this moment,
            #otherwise skip the test
            last_build_num = server.get_job_info(jenkins_test_job)['lastBuild'].get('number')
            last_build_status = server.get_build_info(jenkins_test_job,last_build_num)['building']
            if last_build_status:
                pytest.skip("Test job {0} is already running".format(jenkins_test_job))
        #The jenkins module's build_job doesn't work without parameters,
        #so send some fake ones; all others will keep their default values
        param_dict = {'foo':'bar'}
        server.build_job(jenkins_test_job, param_dict)
        timeout = 0
        #Job status is True by default to cover the delay between scheduling the build and its start.
        job_status = True
        while job_status and ( timeout < 180 ):
            time.sleep(10)
            timeout += 10
            job_status = server.get_build_info(jenkins_test_job,next_build_num)['building']
        job_result = server.get_build_info(jenkins_test_job,next_build_num)['result']
    else:
        pytest.skip("The job {0} was not found").format(test_job_name)
    assert job_result == 'SUCCESS', \
        '''Test job '{0}' build was not successful or the timeout is too small
         '''.format(jenkins_test_job)
Example #4
def test_checking_rabbitmq_cluster(local_salt_client):
    # disable config for this test
    # it may be reintroduced in future
    config = utils.get_configuration()
    # request pillar data from rmq nodes
    rabbitmq_pillar_data = local_salt_client.cmd('rabbitmq:server',
                                                 'pillar.data',
                                                 ['rabbitmq:cluster'],
                                                 expr_form='pillar')
    # creating dictionary {node:cluster_size_for_the_node}
    # with required cluster size for each node
    control_dict = {}
    required_cluster_size_dict = {}
    # request actual data from rmq nodes
    rabbit_actual_data = local_salt_client.cmd('rabbitmq:server',
                                               'cmd.run',
                                               ['rabbitmqctl cluster_status'],
                                               expr_form='pillar')
    for node in rabbitmq_pillar_data:
        if node in config.get('skipped_nodes'):
            del rabbit_actual_data[node]
            continue
        cluster_size_from_the_node = len(
            rabbitmq_pillar_data[node]['rabbitmq:cluster']['members'])
        required_cluster_size_dict.update({node: cluster_size_from_the_node})

    # find actual cluster size for each node
    for node in rabbit_actual_data:
        # rabbitmqctl cluster_status output contains
        # 3 * <number of nodes> 'rabbit@' entries + 1
        running_nodes_count = (rabbit_actual_data[node].count('rabbit@') -
                               1) / 3
        # update control dictionary with values
        # {node:actual_cluster_size_for_node}
        if required_cluster_size_dict[node] != running_nodes_count:
            control_dict.update({node: running_nodes_count})

    assert not len(control_dict), "Inconsistency found within cloud. " \
                                  "RabbitMQ cluster is probably broken, " \
                                  "the cluster size for each node " \
                                  "should be: {} but the following " \
                                  "nodes have other values: {}".format(
        len(required_cluster_size_dict.keys()), control_dict)
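The 3 * nodes + 1 counting rule can be checked against a fabricated rabbitmqctl cluster_status output for a three-node cluster: each node appears once under nodes, once under running_nodes and once under alarms, plus one occurrence in the header line. This assumes the cluster name itself does not contain 'rabbit@'; the exact output format varies between RabbitMQ versions.

sample = """Cluster status of node 'rabbit@ctl01'
[{nodes,[{disc,['rabbit@ctl01','rabbit@ctl02','rabbit@ctl03']}]},
 {running_nodes,['rabbit@ctl01','rabbit@ctl02','rabbit@ctl03']},
 {cluster_name,<<"openstack">>},
 {partitions,[]},
 {alarms,[{'rabbit@ctl01',[]},{'rabbit@ctl02',[]},{'rabbit@ctl03',[]}]}]"""
# 10 occurrences of 'rabbit@' -> (10 - 1) / 3 == 3 running nodes
assert (sample.count('rabbit@') - 1) / 3 == 3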
Example #5
def test_ntp_sync(local_salt_client):
    testname = os.path.basename(__file__).split('.')[0]
    active_nodes = utils.get_active_nodes(os.path.basename(__file__))
    config = utils.get_configuration()
    fail = {}
    saltmaster_time = int(
        local_salt_client.cmd('salt:master',
                              'cmd.run', ['date +%s'],
                              expr_form='pillar').values()[0])
    nodes_time = local_salt_client.cmd(utils.list_to_target_string(
        active_nodes, 'or'),
                                       'cmd.run', ['date +%s'],
                                       expr_form='compound')
    diff = config.get(testname)["time_deviation"] or 30
    for node, node_time in nodes_time.iteritems():
        # flag nodes whose clock deviates from the Salt master
        # by more than the allowed threshold in either direction
        if abs(int(node_time) - saltmaster_time) > diff:
            fail[node] = node_time

    assert not fail, 'SaltMaster time: {}\n' \
                     'Nodes with time mismatch:\n {}'.format(saltmaster_time,
                                                             fail)
Example #6
def test_drivetrain_jenkins_job(local_salt_client):
    jenkins_password = get_password(local_salt_client, 'jenkins:client')
    server = join_to_jenkins(local_salt_client, 'admin', jenkins_password)
    #Getting Jenkins test job name from configuration
    config = utils.get_configuration()
    jenkins_test_job = config['jenkins_test_job']
    if not server.get_job_name(jenkins_test_job):
        server.create_job(jenkins_test_job, jenkins.EMPTY_CONFIG_XML)
    if server.get_job_name(jenkins_test_job):
        next_build_num = server.get_job_info(
            jenkins_test_job)['nextBuildNumber']
        #If this is the first build, skip the running-build check
        if next_build_num != 1:
            #Check that the test job is not running at this moment,
            #otherwise skip the test
            last_build_num = server.get_job_info(
                jenkins_test_job)['lastBuild'].get('number')
            last_build_status = server.get_build_info(
                jenkins_test_job, last_build_num)['building']
            if last_build_status:
                pytest.skip("Test job {0} is already running".format(
                    jenkins_test_job))
        server.build_job(jenkins_test_job)
        timeout = 0
        #Job status is True by default to cover the delay between scheduling the build and its start.
        job_status = True
        while job_status and (timeout < 180):
            time.sleep(10)
            timeout += 10
            job_status = server.get_build_info(jenkins_test_job,
                                               next_build_num)['building']
        job_result = server.get_build_info(jenkins_test_job,
                                           next_build_num)['result']
    else:
        pytest.skip("The job {0} was not found").format(test_job_name)
    assert job_result == 'SUCCESS', \
        '''Test job '{0}' build was not successful or the timeout is too small
         '''.format(jenkins_test_job)
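The polling loop in Examples #3 and #6 could be factored into a small helper; a hypothetical sketch using the same python-jenkins calls (wait_for_build itself is not part of the original suite):

import time

def wait_for_build(server, job_name, build_num, timeout=180, interval=10):
    # hypothetical helper: poll Jenkins until the build finishes or the
    # timeout expires; returns the build result, or None on timeout
    waited = 0
    while waited < timeout:
        time.sleep(interval)
        waited += interval
        if not server.get_build_info(job_name, build_num)['building']:
            return server.get_build_info(job_name, build_num)['result']
    return None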
Example #7
def test_jenkins_jobs_branch(local_salt_client):
    config = utils.get_configuration()
    expected_version = config['drivetrain_version'] or []
    if not expected_version or expected_version == '':
        pytest.skip("drivetrain_version is not defined. Skipping")
    jenkins_password = local_salt_client.cmd(
        'jenkins:client',
        'pillar.get',
        ['_param:openldap_admin_password'],
        expr_form='pillar').values()[0]
    jenkins_port = local_salt_client.cmd(
        'I@jenkins:client and not I@salt:master',
        'pillar.get',
        ['_param:haproxy_jenkins_bind_port'],
        expr_form='compound').values()[0]
    jenkins_address = local_salt_client.cmd(
        'I@jenkins:client and not I@salt:master',
        'pillar.get',
        ['_param:haproxy_jenkins_bind_host'],
        expr_form='compound').values()[0]
    version_mismatch = []
    jenkins_url = 'http://{0}:{1}'.format(jenkins_address,jenkins_port)
    server = Jenkins(jenkins_url, username='******', password=jenkins_password)
    for job_name, job_instance in server.get_jobs():
        job_config = job_instance.get_config()
        xml_data = minidom.parseString(job_config)
        BranchSpec = xml_data.getElementsByTagName('hudson.plugins.git.BranchSpec')
        if BranchSpec:
            actual_version = BranchSpec[0].getElementsByTagName('name')[0].childNodes[0].data
            if actual_version != expected_version and 'master' not in actual_version:
                version_mismatch.append("Job {0} has {1} branch."
                                        "Expected {2}".format(job_instance.name,
                                                              actual_version,
                                                              expected_version))
    assert len(version_mismatch) == 0, \
        '''Some DriveTrain jobs have version/branch mismatch:
              {}'''.format(json.dumps(version_mismatch, indent=4))
Example #8
def test_gerrit_repositories(local_salt_client):
    missing_repos = []
    config = utils.get_configuration()
    gerrit_password = local_salt_client.cmd('I@gerrit:client',
                                            'pillar.get',
                                            ['_param:openldap_admin_password'],
                                            expr_form='compound').values()[0]
    gerrit_port = local_salt_client.cmd('I@gerrit:client',
                                        'pillar.get',
                                        ['gerrit:client:server:http_port'],
                                        expr_form='compound').values()[0]
    gerrit_address = local_salt_client.cmd('I@gerrit:client',
                                           'pillar.get',
                                           ['gerrit:client:server:host'],
                                           expr_form='compound').values()[0]
    gerrit_protocol = local_salt_client.cmd('I@gerrit:client',
                                            'pillar.get',
                                            ['gerrit:client:server:protocol'],
                                            expr_form='compound').values()[0]

    auth = HTTPBasicAuth('admin', gerrit_password)
    rest = GerritRestAPI(url="{0}://{1}:{2}".format(gerrit_protocol,
                                                    gerrit_address,
                                                    gerrit_port),
                         auth=auth)

    for repo in config['drivetrain_repos']:
        repoHttp = repo.replace("/", "%2F")
        try:
            response = rest.get("/projects/{0}".format(repoHttp))
        except requests.exceptions.HTTPError as e:
            missing_repos.append("Repo {0} is missing".format(repo))


    assert len(missing_repos) == 0, \
        '''Some repositories in Gerrit are missing:
              {}'''.format(json.dumps(missing_repos, indent=4))
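Escaping only the slash with replace("/", "%2F") covers the common case; a more general sketch percent-encodes every reserved character with urllib.quote (Python 2; the repository name here is fabricated):

import urllib

repo = 'salt-models/reclass-system'  # fabricated name
# an empty safe set forces '/' and any other reserved characters
# to be percent-encoded, as the Gerrit REST API expects
repo_http = urllib.quote(repo, safe='')
assert repo_http == 'salt-models%2Freclass-system'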
Example #9
def test_drivetrain_openldap(local_salt_client):
    '''Create a test user 'DT_test_user' in openldap,
    add the user to admin group, login using the user to Jenkins.
    Add the user to devops group in Gerrit and then login to Gerrit,
    using test_user credentials. Finally, delete the user from admin
    group and openldap
    '''
    ldap_password = get_password(local_salt_client, 'openldap:client')
    #Check that ldap_password exists, otherwise skip the test
    if not ldap_password:
        pytest.skip("Openldap service or openldap:client pillar \
        is not found in this environment.")
    ldap_port = local_salt_client.cmd(
        'I@openldap:client and not I@salt:master',
        'pillar.get', ['_param:haproxy_openldap_bind_port'],
        expr_form='compound').values()[0]
    ldap_address = local_salt_client.cmd(
        'I@openldap:client and not I@salt:master',
        'pillar.get', ['_param:haproxy_openldap_bind_host'],
        expr_form='compound').values()[0]
    ldap_dc = local_salt_client.cmd('openldap:client',
                                    'pillar.get', ['_param:openldap_dn'],
                                    expr_form='pillar').values()[0]
    ldap_con_admin = local_salt_client.cmd(
        'openldap:client',
        'pillar.get', ['openldap:client:server:auth:user'],
        expr_form='pillar').values()[0]
    ldap_url = 'ldap://{0}:{1}'.format(ldap_address, ldap_port)
    ldap_error = ''
    ldap_result = ''
    gerrit_result = ''
    gerrit_error = ''
    jenkins_error = ''
    #Test user's CN
    test_user_name = 'DT_test_user'
    test_user = '******'.format(test_user_name, ldap_dc)
    #Admins group CN
    admin_gr_dn = 'cn=admins,ou=groups,{0}'.format(ldap_dc)
    #List of attributes for test user
    attrs = {}
    attrs['objectclass'] = [
        'organizationalRole', 'simpleSecurityObject', 'shadowAccount'
    ]
    attrs['cn'] = test_user_name
    attrs['uid'] = test_user_name
    attrs['userPassword'] = '******'
    attrs['description'] = 'Test user for CVP DT test'
    searchFilter = 'cn={0}'.format(test_user_name)
    #Get a test job name from config
    config = utils.get_configuration()
    jenkins_test_job = config['jenkins_test_job']
    if not jenkins_test_job or jenkins_test_job == '':
        jenkins_test_job = 'git-mirror-downstream-pipeline-library'
    #Open connection to ldap and creating test user in admins group
    try:
        ldap_server = ldap.initialize(ldap_url)
        ldap_server.simple_bind_s(ldap_con_admin, ldap_password)
        ldif = modlist.addModlist(attrs)
        ldap_server.add_s(test_user, ldif)
        ldap_server.modify_s(
            admin_gr_dn,
            [(
                ldap.MOD_ADD,
                'memberUid',
                [test_user_name],
            )],
        )
        #Check search test user in LDAP
        searchScope = ldap.SCOPE_SUBTREE
        ldap_result = ldap_server.search_s(ldap_dc, searchScope, searchFilter)
    except ldap.LDAPError as e:
        ldap_error = e
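modlist.addModlist turns the attribute dictionary into the list of (attribute, values) tuples that add_s expects; a minimal illustration (tuple order may vary, since it follows dict iteration order):

import ldap.modlist as modlist

attrs = {'cn': 'DT_test_user',
         'objectclass': ['organizationalRole', 'simpleSecurityObject']}
ldif = modlist.addModlist(attrs)
# e.g. [('cn', 'DT_test_user'),
#       ('objectclass', ['organizationalRole', 'simpleSecurityObject'])]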
Example #10
def test_mtu(local_salt_client, nodes_in_group):
    testname = os.path.basename(__file__).split('.')[0]
    config = utils.get_configuration()
    skipped_ifaces = config.get(testname)["skipped_ifaces"] or \
        ["bonding_masters", "lo", "veth", "tap", "cali"]
    total = {}
    network_info = local_salt_client.cmd("L@" + ','.join(nodes_in_group),
                                         'cmd.run', ['ls /sys/class/net/'],
                                         expr_form='compound')

    kvm_nodes = local_salt_client.cmd('salt:control',
                                      'test.ping',
                                      expr_form='pillar').keys()

    if len(network_info.keys()) < 2:
        pytest.skip("Nothing to compare - only 1 node")

    for node, ifaces_info in network_info.iteritems():
        if node in kvm_nodes:
            kvm_info = local_salt_client.cmd(node, 'cmd.run', [
                "virsh list | "
                "awk '{print $2}' | "
                "xargs -n1 virsh domiflist | "
                "grep -v br-pxe | grep br- | "
                "awk '{print $1}'"
            ])
            ifaces_info = kvm_info.get(node)
        node_ifaces = ifaces_info.split('\n')
        ifaces = {}
        for iface in node_ifaces:
            for skipped_iface in skipped_ifaces:
                if skipped_iface in iface:
                    break
            else:
                iface_mtu = local_salt_client.cmd(
                    node, 'cmd.run',
                    ['cat /sys/class/'
                     'net/{}/mtu'.format(iface)])
                ifaces[iface] = iface_mtu.get(node)
        total[node] = ifaces

    nodes = []
    mtu_data = []
    my_set = set()

    for node in total:
        nodes.append(node)
        my_set.update(total[node].keys())
    for interf in my_set:
        diff = []
        row = []
        for node in nodes:
            if interf in total[node].keys():
                diff.append(total[node][interf])
                row.append("{}: {}".format(node, total[node][interf]))
            else:
                row.append("{}: No interface".format(node))
        if diff.count(diff[0]) < len(nodes):
            row.sort()
            row.insert(0, interf)
            mtu_data.append(row)
    assert len(mtu_data) == 0, \
        "Several problems found: {0}".format(
        json.dumps(mtu_data, indent=4))
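The final comparison flags an interface only when its MTU values are not identical across all nodes; a minimal walk-through with fabricated data:

total = {'cmp01': {'ens3': '1500', 'ens4': '9000'},
         'cmp02': {'ens3': '1500', 'ens4': '1500'}}
nodes = sorted(total)
for interf in ('ens3', 'ens4'):
    diff = [total[node][interf] for node in nodes]
    if diff.count(diff[0]) < len(nodes):
        print(interf)  # only 'ens4' is reported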