def __init__(self, hadoop_artifact_url, hadoop_install_dir, hadoop_data_dir,
             template_conf_dir, hostname='localhost', secure_hadoop=False):
    """Initialize an RPM-based Hadoop deployment helper.

    @param hadoop_artifact_url: URL of the Hadoop RPM artifacts to install
    @param hadoop_install_dir: directory where Hadoop gets installed
    @param hadoop_data_dir: root directory for Hadoop data, caches and logs
    @param template_conf_dir: directory holding the template configuration
                              (must contain conf.secure and conf.pseudo)
    @param hostname: host to deploy on (defaults to localhost)
    @param secure_hadoop: True to deploy with the secure (Kerberos) config
    """
    HadoopUtil.__init__(self, hadoop_artifact_url, hadoop_install_dir,
                        hadoop_data_dir, hostname)
    self.rpmutil = RPMUtil()

    self.hostname = hostname
    self.hadoop_artifact_url = hadoop_artifact_url
    self.hadoop_install_dir = hadoop_install_dir
    self.hadoop_binary_loc = ''
    self.hadoop_data_dir = hadoop_data_dir
    self.template_conf_dir = template_conf_dir
    self.secure_hadoop = secure_hadoop

    # Constants.
    # Under the hadoop template configuration directory both of the
    # directories below are expected to be present.
    self.SECURE_DIR_NAME = "conf.secure"      # secure configuration files location
    self.NON_SECURE_DIR_NAME = "conf.pseudo"  # non-secure configuration files location

    # OS packages the Hadoop RPMs depend on (matched by prefix).
    self.DEPENDENCY_PKGS = [
        "fuse-",      # eg. fuse-2.8.3-4.el6.x86_64
        "fuse-libs",  # eg. fuse-libs-2.8.3-4.el6.x86_6
        "nc-"         # eg. 1.84-22.el6.x86_64"
    ]

    # Relative locations of the RPM groups inside the artifact.
    self.HADOOP_UTILITY_RPMS = "utility/rpm/"
    self.ZOOKEEPER_RPMS = "zookeeper/rpm/"
    self.HADOOP_RPMS = "hadoop/rpm/"

    # Environment expected by the deployed Hadoop services.
    gphd_prefix = "/usr/lib/gphd"
    data_root = self.hadoop_data_dir
    self.HADOOP_ENVS = {
        "HADOOP_HOME": gphd_prefix + "/hadoop/",
        "HADOOP_COMMON_HOME": gphd_prefix + "/hadoop/",
        "HADOOP_HDFS_HOME": gphd_prefix + "/hadoop-hdfs/",
        "HADOOP_MAPRED_HOME": gphd_prefix + "/hadoop-mapreduce/",
        "YARN_HOME": gphd_prefix + "/hadoop-yarn/",
        "HADOOP_TMP_DIR": "%s/hadoop-hdfs/cache/" % data_root,
        "MAPRED_TMP_DIR": "%s/hadoop-mapreduce/cache/" % data_root,
        "YARN_TMP_DIR": "%s/hadoop-yarn/cache/" % data_root,
        "HADOOP_CONF_DIR": "/etc/hadoop/conf",
        "HADOOP_LOG_DIR": "%s/hadoop-logs/hadoop-hdfs" % data_root,
        "MAPRED_LOG_DIR": "%s/hadoop-logs/hadoop-mapreduce" % data_root,
        "YARN_LOG_DIR": "%s/hadoop-logs/hadoop-yarn" % data_root
    }

    # Regex of previously-installed packages to purge before a deploy.
    self.PKGS_TO_REMOVE = "^hadoop-*|^bigtop-*|^zookeeper-*|^parquet-*"
def __init__(self, hadoop_src_artifact_url, hadoop_install_dir, hadoop_data_dir,
             template_conf_dir, hostname='localhost', secure_hadoop=False):
    """Initialize a source-build Hadoop deployment helper.

    @param hadoop_src_artifact_url: URL of the Hadoop source artifact; the
                                    binaries are built from source
    @param hadoop_install_dir: directory where Hadoop gets installed
    @param hadoop_data_dir: root directory for Hadoop data
    @param template_conf_dir: directory holding the template configuration
                              (must contain conf.secure and conf.pseudo)
    @param hostname: host to deploy on (defaults to localhost)
    @param secure_hadoop: True to deploy with the secure (Kerberos) config
    """
    HadoopUtil.__init__(self, hadoop_src_artifact_url, hadoop_install_dir,
                        hadoop_data_dir, hostname)
    self.rpmutil = RPMUtil()
    self.hostname = hostname
    # we build the hadoop binaries from the source code so as to avoid any issues
    self.hadoop_src_artifact_url = hadoop_src_artifact_url
    self.hadoop_install_dir = hadoop_install_dir
    self.hadoop_binary_loc = ''
    # FIX: hadoop_data_dir was accepted (and forwarded to HadoopUtil) but never
    # stored on self, unlike the sibling RPM-based class which exposes
    # self.hadoop_data_dir. Store it for consistency.
    self.hadoop_data_dir = hadoop_data_dir
    self.template_conf_dir = template_conf_dir
    self.secure_hadoop = secure_hadoop
    # Constants.
    # Under the hadoop template configuration directory both of the
    # directories below are expected to be present.
    self.SECURE_DIR_NAME = "conf.secure"      # secure configuration files location
    self.NON_SECURE_DIR_NAME = "conf.pseudo"  # non-secure configuration files location
def __init__(self, kdc_host, kdc_domain, krb_template_conf, node_list):
    """Initialize a Kerberos KDC setup helper.

    @param kdc_host: host that runs the KDC
    @param kdc_domain: Kerberos domain of the KDC
    @param krb_template_conf: directory holding the krb5/kdc/kadm5 templates
    @param node_list: optional list of cluster hosts; the KDC host is
                      always included in self.list_of_hosts
    """
    self.kdc_host = kdc_host
    self.kdc_domain = kdc_domain
    self.krb_template_conf = krb_template_conf
    # BUG FIX: the original did `self.list_of_hosts = node_list.append(kdc_host)`.
    # list.append() returns None, so list_of_hosts was None whenever a
    # non-empty node_list was supplied. Build a new list instead (this also
    # avoids mutating the caller's list).
    if node_list:
        self.list_of_hosts = list(node_list) + [kdc_host]
    else:
        self.list_of_hosts = [kdc_host]
    tinctest.logger.info("list_of_hosts - %s" % self.list_of_hosts)
    self.rpmutil = RPMUtil()

    # Commands used to drive the KDC services and databases.
    self.service_cmd = "sudo /sbin/service"
    self.kadmin_cmd = "sudo /usr/sbin/kadmin.local -q "
    self.kdb5_cmd = "/usr/sbin/kdb5_util"
    self.login_user = self._get_login_user()  # get current logged-in user

    # Constants: packages, realm, config file targets and their templates.
    self.KRB_PKG_LIST = ["krb5-server", "krb5-libs", "krb5-workstation"]
    self.REALM = "HD.PIVOTAL.COM"
    self.KRB_CONF = "/etc/krb5.conf"
    self.KRB_CONF_TEMPLATE = "krb5.conf.t"
    self.KDC_CONF = "/var/kerberos/krb5kdc/kdc.conf"
    self.KDC_CONF_TEMPLATE = "kdc.conf"
    self.KADMIN_ACL_CONF = "/var/kerberos/krb5kdc/kadm5.acl"
    self.KADMIN_ACL_CONF_TEMPLATE = "kadm5.acl"
    self.PRINCIPALS = ["hdfs", "yarn", "mapred", "HTTP"]