def generateTestReportConf(infile, outfile, results, startTime, endTime):
    config = ConfigParser()
    config.optionxform = str
    config.read(infile)
    if config.has_section(SECTION):
        # set the version to 2.0 so new keys are processed
        config.set(SECTION, 'report_version', '2.0')
        # Store the original component value. Some testsuite runs (e.g. HiveServer2Concurr, Sqoop)
        # change COMPONENT for reporting, but we need to preserve the original so log archiving stays unique.
        config.set(SECTION, "TESTSUITE_COMPONENT", config.get(SECTION, "COMPONENT"))
        for option, value in config.items(SECTION):
            try:
                if ((option != "SECURE" and value != "")
                        or (Config.getEnv("HDP_STACK_INSTALLED").lower() == "false" and value != "")):
                    continue
                elif option == "BUILD_ID":
                    # if REPO_URL is not set, set the BUILD_ID to 0;
                    # otherwise get the BUILD_ID from the file
                    if config.get(SECTION, "REPO_URL") == "" or not config.has_option(SECTION, "REPO_URL"):
                        config.set(SECTION, option, 0)
                    else:
                        config.set(SECTION, option, getBuildId(config.get(SECTION, "REPO_URL")))
                elif option == "HOSTNAME":
                    config.set(SECTION, option, socket.getfqdn())
                elif option == "COMPONENT_VERSION":
                    if not config.has_option(SECTION, "COMPONENT") or config.get(SECTION, "COMPONENT") == "":
                        config.set(SECTION, "COMPONENT", "Hadoop")
                    if "ambarieu-hdf" in config.get(SECTION, "COMPONENT"):
                        config.set(SECTION, option, getComponentVersion(config.get(SECTION, "COMPONENT")))
                    elif "ambari" in config.get(SECTION, "COMPONENT"):
                        config.set(SECTION, option, getComponentVersion("Ambari"))
                    else:
                        config.set(SECTION, option, getComponentVersion(config.get(SECTION, "COMPONENT")))
                elif option == "OS":
                    if Machine.isWindows():
                        cmd = 'powershell (Get-WmiObject -class Win32_OperatingSystem).Caption'
                        _exit_code, stdout = Machine.runasDeprecated(
                            user=Machine.getAdminUser(), cmd=cmd, passwd=Machine.getAdminPasswd()
                        )
                        config.set(SECTION, option, stdout)
                        continue
                    osname = platform.dist()[0]
                    # hack to detect Oracle Linux, since platform.dist() cannot distinguish it
                    if os.path.exists('/etc/oracle-release'):
                        osname = 'oracle'
                    ver = platform.dist()[1]
                    # Need a hack for SLES, as Python cannot distinguish 11.1 from 11.3
                    if osname.lower() == 'suse':
                        # read /etc/SuSE-release and determine the patch version
                        f = open('/etc/SuSE-release', 'r')
                        txt = f.read()
                        f.close()
                        # get the patch level.
                        # For example: PATCHLEVEL = 3
                        m = re.search('PATCHLEVEL = (.*)', txt, re.MULTILINE)
                        # if we find a match, append it to the version string
                        if m and m.group(1):
                            ver = '%s.%s' % (ver, m.group(1))
                    arch = platform.architecture()[0]
                    if os.path.exists('/etc/os-release'):
                        try:
                            f = open('/etc/os-release', 'r')
                            txt = f.read()
                            f.close()
                            m = re.search('NAME="(.*)"', txt, re.MULTILINE)
                            if m and m.group(1):
                                if m.group(1) == "Amazon Linux":
                                    osname = "amazonlinux"
                                    m = re.search('VERSION="(.*)"', txt, re.MULTILINE)
                                    if m and m.group(1):
                                        ver = m.group(1)
                                        if "2 (2017.12)" in ver:
                                            ver = "2"
                                    # the Amazon Linux AMI the QE team uses is 64-bit
                                    arch = "64bit"
                        except Exception:
                            logger.error(traceback.format_exc())
                    config.set(SECTION, option, '%s-%s-%s' % (osname, ver, arch))
                elif option == "HDP_STACK":
                    if "ambari" in config.get(SECTION, "COMPONENT"):
                        from beaver.component.ambari import Ambari
                        hdpVersion = Ambari.getHDPVersion()
                        if hdpVersion and hdpVersion[0] in ('1', '2'):
                            config.set(SECTION, option, "h" + hdpVersion[0])
                        else:
                            config.set(SECTION, option, 'h2')
                    else:
                        hadoopVersion = getComponentVersion("Hadoop")
                        if hadoopVersion and hadoopVersion[0] in ('1', '2'):
                            config.set(SECTION, option, "h" + hadoopVersion[0])
                elif option == "TDE":
                    from beaver.component.hadoop import HDFS2
                    if HDFS2.isKMSEnabled():
                        config.set(SECTION, option, "on")
                    else:
                        config.set(SECTION, option, "off")
                elif option == "SECURE":
                    if "ambari" in config.get(SECTION, "COMPONENT"):
                        from beaver.component.ambari import Ambari
                        config.set(SECTION, option, str(Ambari.isSecure()).lower())
                        secure_str = str(Ambari.isSecure()).lower()
                    else:
                        from beaver.component.hadoop import Hadoop
                        secure_str = str(Hadoop.isSecure()).lower()
                        if config.get(SECTION, "COMPONENT") == "HiveServer2Concurr":
                            config.set(SECTION, "hs2_authorization", "SQL Standard")
                            if Hadoop.isSecure():
                                config.set(SECTION, "hs2_authentication", "Kerberos")
                            else:
                                config.set(SECTION, "hs2_authentication", "Unsecure")
                            config.set(SECTION, "hs2_transport", "Binary")
                            config.set(SECTION, "hs2_ssl", "false")
                            config.set(SECTION, "hs2_trusted_proxy", "false")
                        elif config.get(SECTION, "COMPONENT") == "HiveServer2ConcurrHTTP":
                            if Hadoop.isEncrypted():
                                secure_str += "-http-en"
                            else:
                                secure_str += "-http"
                            config.set(SECTION, "hs2_authorization", "SQL Standard")
                            if Hadoop.isSecure():
                                config.set(SECTION, "hs2_authentication", "Kerberos")
                            else:
                                config.set(SECTION, "hs2_authentication", "Unsecure")
                            config.set(SECTION, "hs2_transport", "HTTP")
                            config.set(SECTION, "hs2_ssl", "false")
                            config.set(SECTION, "hs2_trusted_proxy", "false")
                        elif config.get(SECTION, "COMPONENT") == "HiveServer2ConcurrLDAP":
                            if Hadoop.isEncrypted():
                                secure_str += "-ldap-en"
                            else:
                                secure_str += "-ldap"
                            config.set(SECTION, "hs2_authorization", "SQL Standard")
                            config.set(SECTION, "hs2_authentication", "LDAP")
                            config.set(SECTION, "hs2_transport", "Binary")
                            config.set(SECTION, "hs2_ssl", "false")
                            config.set(SECTION, "hs2_trusted_proxy", "false")
                        elif config.get(SECTION, "COMPONENT") == "HiveServer2ConcurrLDAPHTTP":
                            if Hadoop.isEncrypted():
                                secure_str += "-ldap-http-en"
                            else:
                                secure_str += "-ldap-http"
                            config.set(SECTION, "hs2_authorization", "SQL Standard")
                            config.set(SECTION, "hs2_authentication", "LDAP")
                            config.set(SECTION, "hs2_transport", "HTTP")
                            config.set(SECTION, "hs2_ssl", "false")
                            config.set(SECTION, "hs2_trusted_proxy", "false")
                        elif config.get(SECTION, "COMPONENT") == "HiveServer2ConcurrSSL":
                            if Hadoop.isEncrypted():
                                secure_str += "-ssl-en"
                            else:
                                secure_str += "-ssl"
                            config.set(SECTION, "hs2_authorization", "SQL Standard")
config.set(SECTION, "hs2_authentication", "Unsecure") config.set(SECTION, "hs2_transport", "Binary") config.set(SECTION, "hs2_ssl", "true") config.set(SECTION, "hs2_trusted_proxy", "false") elif config.get(SECTION, "COMPONENT") == "HiveServer2ConcurrSSLHTTP": if Hadoop.isEncrypted(): secure_str += "-ssl-http-en" else: secure_str += "-ssl-http" config.set(SECTION, "hs2_authorization", "SQL Standard") config.set(SECTION, "hs2_authentication", "Unsecure") config.set(SECTION, "hs2_transport", "HTTP") config.set(SECTION, "hs2_ssl", "true") config.set(SECTION, "hs2_trusted_proxy", "false") elif config.get(SECTION, "COMPONENT") == "HiveServer2ConcurrTPUser": if Hadoop.isEncrypted(): secure_str += "-tpuser-en" else: secure_str += "-tpuser" config.set(SECTION, "hs2_authorization", "SQL Standard") config.set(SECTION, "hs2_authentication", "Kerberos") config.set(SECTION, "hs2_transport", "Binary") config.set(SECTION, "hs2_ssl", "false") config.set(SECTION, "hs2_trusted_proxy", "true") elif config.get(SECTION, "COMPONENT") == "HiveServer2ConcurrLongRunning": if Hadoop.isEncrypted(): secure_str += "-longrun-en" else: secure_str += "-longrun" config.set(SECTION, "hs2_authorization", "SQL Standard") if Hadoop.isSecure(): config.set(SECTION, "hs2_authentication", "Kerberos") else: config.set(SECTION, "hs2_authentication", "Unsecure") config.set(SECTION, "hs2_transport", "Binary") config.set(SECTION, "hs2_ssl", "false") config.set(SECTION, "hs2_trusted_proxy", "false") elif config.get(SECTION, "COMPONENT") == "SqoopDb2": config.set(SECTION, "COMPONENT", "Sqoop") else: if Hadoop.isEncrypted(): secure_str += '-en' config.set(SECTION, option, secure_str) elif option == "BLOB": pass elif option == "RAN": # dont add skipped, just pass + fail + aborted config.set(SECTION, option, results[0] + len(results[1]) + results[3]) elif option == "PASS": config.set(SECTION, option, results[0]) elif option == "FAIL": config.set(SECTION, option, len(results[1])) elif option == "SKIPPED": config.set(SECTION, option, results[2]) elif option == "ABORTED": config.set(SECTION, option, results[3]) elif option == "FAILED_TESTS": failedTests = ",".join(results[1]) failureSummary = ReportHelper.getFailureSummary(failedTests) config.set(SECTION, "FAILURE_SUMMARY", failureSummary) tmpFailedTests = ReportHelper.getGroupedFailedTests(failedTests) config.set(SECTION, option, ReportHelper.getMergedFailedTests(tmpFailedTests, failureSummary)) elif option == "NUM_OF_DATANODES": if "ambari" in config.get(SECTION, "COMPONENT"): config.set(SECTION, option, "N/A") else: from beaver.component.hadoop import HDFS config.set(SECTION, option, HDFS.getDatanodeCount()) elif option == "BUILD_URL": if 'BUILD_URL' in os.environ: config.set(SECTION, option, os.environ['BUILD_URL']) elif option == "HDP_RELEASE": # If RU/RB, we must override HDP_RELEASE # (we can't fix this with product front. Discussed in BUG-31369.) 
if config.get(SECTION, "TESTSUITE_COMPONENT").lower() in ["rollingupgrade", "rollback", "rollingupgrade-ha", "rollback-ha"]: config.set(SECTION, option, "dal") else: config.set(SECTION, option, getRepoId(config.get(SECTION, "REPO_URL"))) elif option == "JDK": config.set(SECTION, option, Machine.getJDK()) elif option == "DB": if not config.has_option(SECTION, "COMPONENT") or config.get(SECTION, "COMPONENT") == "": config.set(SECTION, "COMPONENT", "Hadoop") config.set(SECTION, option, getDatabaseFlavor(config.get(SECTION, "COMPONENT"))) except Exception as error: logger.error("ERROR processing option: %s", option) logger.error("Exception: %s", error) # make sure Hadoop is installed before append Tez to the component name if Config.getEnv("HDP_STACK_INSTALLED").lower() == "true" and config.has_option(SECTION, "COMPONENT"): if "ambari" in config.get(SECTION, "COMPONENT"): kerberos_server_type = 'n/a' from beaver.component.ambari import Ambari if Ambari.isSecure(): kerberos_server_type = 'mit' config.set(SECTION, 'kerberos_server_type', kerberos_server_type) else: from beaver.component.hadoop import Hadoop, HDFS from beaver.component.slider import Slider # set execution_framework. New columns for dashboard v2 # TODO: This needs to be improved to be component specific. if Hadoop.isTez(): if Slider.isInstalled(): config.set(SECTION, 'execution_framework', 'tez-slider') else: config.set(SECTION, 'execution_framework', 'tez') else: if Slider.isInstalled(): config.set(SECTION, 'execution_framework', 'mr-slider') else: config.set(SECTION, 'execution_framework', 'mr') # set wire_encryption # TODO: This needs to be improved to be component specific. if Hadoop.isEncrypted(): config.set(SECTION, 'wire_encryption', 'true') else: config.set(SECTION, 'wire_encryption', 'false') # set kerberos_server_type kerberos_server_type = 'n/a' if Hadoop.isSecure(): kerberos_server_type = 'mit' # add a check for AD if Machine.isLinux(): gateway = Config.get("machine", "GATEWAY") Machine.copyToLocal(Machine.getAdminUser(), gateway, '/etc/krb5.conf', '/tmp/krb5.conf') f = open('/tmp/krb5.conf', 'r') txt = f.read() f.close() #Finding all the admin_server in the krb5.conf with ports, if any p = re.compile('admin_server = ((?!FILE).*)') admin_server_list_with_ports = p.findall(txt) admin_server_list = [] for admin_server_with_port in admin_server_list_with_ports: admin_server_list.append(admin_server_with_port.split(':')[0]) #If len is greater than 1, first checking if one of the admin server is AD host, # than to ensure that not all the hosts are AD hosts, checking if one of the admin # server is not in AD Hosts Lists. if len(admin_server_list) > 1: for ad_host in AD_HOSTS_LIST: if ad_host in admin_server_list: for admin_server in admin_server_list: if admin_server not in AD_HOSTS_LIST: kerberos_server_type = 'ad+mit' break else: for ad_host in AD_HOSTS_LIST: if ad_host in admin_server_list: kerberos_server_type = 'ad' break config.set(SECTION, 'kerberos_server_type', kerberos_server_type) try: from beaver.component.xa import Xa # set argus. 
                    # New column for dashboard v2
                    if Xa.isArgus():
                        config.set(SECTION, 'argus', 'true')
                    else:
                        config.set(SECTION, 'argus', 'false')
                except Exception as error:
                    logger.error("ERROR processing argus")
                    logger.error("Exception: %s", error)
                # set TDE
                if HDFS.isKMSEnabled():
                    config.set(SECTION, 'tde', 'true')
                else:
                    config.set(SECTION, 'tde', 'false')
        config.set(SECTION, 'START_TIME', startTime)
        config.set(SECTION, 'END_TIME', endTime)
        coverage_summary_file = os.path.join(Config.getEnv('ARTIFACTS_DIR'), "coverage_summary.json")
        if os.path.exists(coverage_summary_file):
            fp = open(coverage_summary_file, "r")
            json_str = "\n".join(fp.readlines())
            fp.close()
            coverage_summary = json.loads(json_str)
            for key, value in coverage_summary.items():
                config.set(SECTION, key, value)
        config.write(open(outfile, 'w'))
    elif config.has_section('SANDBOX'):
        out_config = ConfigParser()
        out_config.optionxform = str
        out_config.add_section(SECTION)
        sb_type = config.get('SANDBOX', 'vm_env')
        out_config.set(SECTION, 'BUILD_ID', '0')
        ova_uri = ''
        if sb_type == 'VBOX':
            ova_uri = config.get(sb_type, 'vbox_ova_uri')
        elif sb_type == 'FUSION':
            ova_uri = config.get(sb_type, 'fus_ova_uri')
        if sb_type == 'HYPERV':
            ova_uri = config.get(sb_type, 'hyperv_ova_uri')
        out_config.set(SECTION, 'REPO_URL', ova_uri)
        sb_host = ''
        if os.name != 'nt':
            sb_host = os.popen("hostname -f").read().strip()
            sb_host = sb_host + '(' + os.popen(
                "ifconfig en0 | grep 'inet ' | awk -F ' ' '{print $2}'"
            ).read().strip() + ')'
        else:
            sb_host = 'Kiev local host'
        out_config.set(SECTION, 'HOSTNAME', sb_host)
        out_config.set(SECTION, 'HDP_STACK', "h" + (config.get('VERSIONS', 'hadoop_version')[0]))
        out_config.set(SECTION, 'COMPONENT', 'SANDBOX')
        out_config.set(SECTION, 'TESTSUITE_COMPONENT', 'SANDBOX')
        if sb_type == 'HYPERV':
            sb_ver = 'hyper-v'
        else:
            tmp = ['%20', 'Hortonworks', 'VirtualBox', '.ova', 'VMware', '_']
            sb_ver = ova_uri.split('/')[5]
            for rstr in tmp:
                sb_ver = sb_ver.replace(rstr, '')
        out_config.set(SECTION, 'COMPONENT_VERSION', sb_ver)
        out_config.set(SECTION, 'CHECKSUM', 'N/A')
        ver_num = os.popen("sw_vers | grep 'ProductVersion:' | awk -F ' ' '{print $2}'").read().strip()
        if sb_type == 'HYPERV':
            out_config.set(SECTION, 'OS', 'Windows 8.1')
        else:
            out_config.set(SECTION, 'OS', 'MAC OS X ' + ver_num)
        out_config.set(SECTION, 'SECURE', 'false')
        out_config.set(SECTION, 'TYPE', 'System, UI')
        out_config.set(SECTION, 'BLOB', 'N/A')
        out_config.set(SECTION, 'PKG', 'OVA')
        if sb_type == 'VBOX':
            out_config.set(SECTION, 'INSTALLER', 'Oracle VirtualBox')
        elif sb_type == 'FUSION':
            out_config.set(SECTION, 'INSTALLER', 'VMWare Fusion')
        elif sb_type == 'HYPERV':
            out_config.set(SECTION, 'INSTALLER', 'Windows Hyper-V')
        out_config.set(SECTION, 'RAN', results[0] + len(results[1]) + results[3])
        out_config.set(SECTION, 'PASS', results[0])
        out_config.set(SECTION, 'FAIL', len(results[1]))
        out_config.set(SECTION, 'SKIPPED', results[2])
        out_config.set(SECTION, 'ABORTED', results[3])
        out_config.set(SECTION, 'FAILED_DEPENDENCY', 'N/A')
        out_config.set(SECTION, 'FAILED_TESTS', ",".join(results[1]))
        out_config.set(SECTION, 'NUM_OF_DATANODES', '1')
        out_config.set(SECTION, 'HDP_RELEASE', ova_uri.split('/')[4])
        out_config.set(SECTION, 'JDK', '1.6.0_51')
        out_config.set(SECTION, 'DB', 'N/A')
        out_config.set(SECTION, 'BROWSER', config.get('SANDBOX', 'test_browser'))
        out_config.write(open(outfile, 'w'))
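

# Illustrative usage sketch: how generateTestReportConf might be invoked after a test run.
# The shape of `results` is inferred from the code above (results[0] = pass count,
# results[1] = list of failed test names, results[2] = skipped count, results[3] = aborted count);
# the paths, test names, and timestamps below are hypothetical.
#
#   results = (120, ['test_hs2_query_01', 'test_sqoop_import_03'], 5, 1)
#   generateTestReportConf(
#       infile='conf/report.conf',             # hypothetical report template
#       outfile='artifacts/test_report.conf',  # hypothetical output location
#       results=results,
#       startTime='2018-01-01 00:00:00',
#       endTime='2018-01-01 02:30:00',
#   )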
# company and Hortonworks, Inc, any use, reproduction, modification,
# redistribution, sharing, lending or other exploitation of all or
# any part of the contents of this file is strictly prohibited.
#
#
from beaver.config import Config
from beaver.machine import Machine
from beaver.component.ambari import Ambari
from beaver.component.hadoop import Hadoop, YARN
from beaver.component.zookeeper import Zookeeper
from beaver import util
import os, re, random, logging, time, ast
from taskreporter.taskreporter import TaskReporter

logger = logging.getLogger(__name__)
HDP_Version = re.search(r'\d.\d.\d.(\d.\d.\d).*', Ambari.getHDPVersion()).group(1)


class Hive1:
    _hiveHome = '/usr/hdp/current/hive-client'
    _hiveMetastoreHosts = None
    _hiveServer2Hosts = None
    _hiveMetastorePort = None
    _hiveServer2Port = None
    _hiveLogDir = None
    _compSvcMap = {'metastore': 'HIVE_METASTORE', 'hiveserver2': 'HIVE_SERVER'}
    # hiveserver timeout to be reverted back to 300, once schematool run restricted to install/upgrade
    _compSvcTimeouts = {'metastore': 300, 'hiveserver2': 600}
    _ambariConfigMap = {
        'hive-site.xml': 'hive-site',
        'hiveserver2-site.xml': 'hiveserver2-site',