def flume(action=None):
    """Set up the Flume agent configurations on Windows.

    When action == 'config': registers the Flume Windows service, switches
    its logon to the Flume user, deletes stale per-agent ambari-meta.json
    files, then regenerates each agent's conf directory (flume.conf,
    ambari-meta.json, log4j.properties, flume-env.ps1 and, when a metrics
    collector is present, flume-metrics2.properties).
    """
    import params
    from service_mapping import flume_win_service_name

    if action == 'config':
        ServiceConfig(flume_win_service_name,
                      action="configure",
                      start_type="manual")

        ServiceConfig(flume_win_service_name,
                      action="change_user",
                      username=params.flume_user,
                      password=Script.get_password(params.flume_user))

        # Remove previously defined agent metadata so only the agents in the
        # current topology are advertised.
        for n in find_expected_agent_names(params.flume_conf_dir):
            os.unlink(os.path.join(params.flume_conf_dir, n, 'ambari-meta.json'))

        flume_agents = {}
        if params.flume_conf_content is not None:
            flume_agents = build_flume_topology(params.flume_conf_content)

        for agent in flume_agents.keys():
            flume_agent_conf_dir = os.path.join(params.flume_conf_dir, agent)
            flume_agent_conf_file = os.path.join(flume_agent_conf_dir, 'flume.conf')
            flume_agent_meta_file = os.path.join(flume_agent_conf_dir, 'ambari-meta.json')
            flume_agent_log4j_file = os.path.join(flume_agent_conf_dir, 'log4j.properties')
            flume_agent_env_file = os.path.join(flume_agent_conf_dir, 'flume-env.ps1')

            Directory(flume_agent_conf_dir)

            PropertiesFile(flume_agent_conf_file,
                           properties=flume_agents[agent])

            # BUG FIX: the original had a stray trailing comma after this call,
            # which made this File(...) and the following one a discarded
            # 2-tuple expression. Harmless at runtime but misleading; removed.
            File(flume_agent_log4j_file,
                 content=InlineTemplate(params.flume_log4j_content, agent_name=agent))

            File(flume_agent_meta_file,
                 content=json.dumps(ambari_meta(agent, flume_agents[agent])))

            File(flume_agent_env_file,
                 owner=params.flume_user,
                 content=InlineTemplate(params.flume_env_sh_template))

            if params.has_metric_collector:
                File(os.path.join(flume_agent_conf_dir, "flume-metrics2.properties"),
                     owner=params.flume_user,
                     content=Template("flume-metrics2.properties.j2"))
def yarn(name=None):
    """Write the YARN/MapReduce client configs on Windows and, for a known
    component, switch its Windows service logon to the YARN user.

    Args:
        name: component name; when it is a key of params.service_map the
            matching Windows service has its logon user/password changed.
    """
    import params

    XmlConfig("mapred-site.xml",
              conf_dir=params.config_dir,
              configurations=params.config['configurations']['mapred-site'],
              owner=params.yarn_user,
              mode='f'
              )
    XmlConfig("yarn-site.xml",
              conf_dir=params.config_dir,
              configurations=params.config['configurations']['yarn-site'],
              owner=params.yarn_user,
              mode='f',
              configuration_attributes=params.config['configuration_attributes']['yarn-site']
              )
    XmlConfig("capacity-scheduler.xml",
              conf_dir=params.config_dir,
              configurations=params.config['configurations']['capacity-scheduler'],
              owner=params.yarn_user,
              mode='f'
              )

    # IDIOM FIX: dict.has_key() is deprecated and removed in Python 3;
    # the `in` operator is equivalent and portable.
    if name in params.service_map:
        service_name = params.service_map[name]
        ServiceConfig(service_name,
                      action="change_user",
                      username=params.yarn_user,
                      password=Script.get_password(params.yarn_user))
def falcon(type, action=None, upgrade_type=None):
    """Configure, start, or stop the Falcon service on Windows.

    'config' renders falcon-env.sh and the runtime/startup/client property
    files; for the server component the Windows service logon is switched to
    the Falcon user and the service is started/stopped on request.
    """
    import params

    if action == 'config':
        # These 2 parameters are used in ../templates/client.properties.j2
        runtime_env = Environment.get_instance()
        runtime_env.config.params["falcon_host"] = params.falcon_host
        runtime_env.config.params["falcon_port"] = params.falcon_port

        File(os.path.join(params.falcon_conf_dir, 'falcon-env.sh'),
             content=InlineTemplate(params.falcon_env_sh_template))

        properties_by_file = (
            ('runtime.properties', params.falcon_runtime_properties),
            ('startup.properties', params.falcon_startup_properties),
            ('client.properties', params.falcon_client_properties),
        )
        for file_name, props in properties_by_file:
            PropertiesFile(os.path.join(params.falcon_conf_dir, file_name),
                           properties=props)

    if type == 'server':
        ServiceConfig(params.falcon_win_service_name,
                      action="change_user",
                      username=params.falcon_user,
                      password=Script.get_password(params.falcon_user))

        if action == 'start':
            Service(params.falcon_win_service_name, action="start")
        if action == 'stop':
            Service(params.falcon_win_service_name, action="stop")
def zookeeper(type=None, upgrade_type=None):
    """Configure ZooKeeper on Windows: zoo.cfg, configuration.xsl, service
    logon user, data dir, log4j.properties, and (for servers) the myid file.

    Args:
        type: 'server' additionally writes the quorum myid file.
        upgrade_type: accepted for interface compatibility; unused here.
    """
    import params

    configFile("zoo.cfg", template_name="zoo.cfg.j2", mode="f")
    configFile("configuration.xsl", template_name="configuration.xsl.j2", mode="f")

    ServiceConfig(params.zookeeper_win_service_name,
                  action="change_user",
                  username=params.zk_user,
                  password=Script.get_password(params.zk_user))

    Directory(params.zk_data_dir,
              owner=params.zk_user,
              mode="(OI)(CI)F",
              create_parents=True)

    log4j_path = os.path.join(params.config_dir, "log4j.properties")
    # IDIOM FIX: compare against None with `is not None` (PEP 8), not `!=`.
    if params.log4j_props is not None:
        File(log4j_path,
             mode="f",
             owner=params.zk_user,
             content=params.log4j_props)
    elif os.path.exists(log4j_path):
        # Keep the pre-existing file content but ensure ownership is correct.
        File(log4j_path,
             mode="f",
             owner=params.zk_user)

    if type == 'server':
        # myid is the 1-based position of this host in the sorted quorum list.
        myid = str(sorted(params.zookeeper_hosts).index(params.hostname) + 1)
        File(os.path.join(params.zk_data_dir, "myid"),
             owner=params.zk_user,
             mode="f",
             content=myid)
def knox():
    """Configure the Knox gateway on Windows.

    Writes gateway-site.xml, switches the gateway Windows service logon to
    the Knox user, renders log4j and topology files (default, optional admin,
    optional KnoxSSO), lays down the secure JAAS config when security is on,
    and generates the master secret and gateway certificate on first run.
    """
    import params

    XmlConfig("gateway-site.xml",
              conf_dir=params.knox_conf_dir,
              configurations=params.config['configurations']['gateway-site'],
              configuration_attributes=params.config['configuration_attributes']['gateway-site'],
              owner=params.knox_user)

    # Manually overriding service logon user & password set by the installation package
    ServiceConfig(params.knox_gateway_win_service_name,
                  action="change_user",
                  username=params.knox_user,
                  password=Script.get_password(params.knox_user))

    File(os.path.join(params.knox_conf_dir, "gateway-log4j.properties"),
         owner=params.knox_user,
         content=params.gateway_log4j)

    File(os.path.join(params.knox_conf_dir, "topologies", "default.xml"),
         group=params.knox_group,
         owner=params.knox_user,
         content=InlineTemplate(params.topology_template))

    # Optional admin topology — only written when the template is provided.
    if params.admin_topology_template:
        File(os.path.join(params.knox_conf_dir, "topologies", "admin.xml"),
             group=params.knox_group,
             owner=params.knox_user,
             content=InlineTemplate(params.admin_topology_template))

    # KnoxSSO topology is gated on the stack supporting the feature.
    if params.version_formatted and check_stack_feature(StackFeature.KNOX_SSO_TOPOLOGY, params.version_formatted):
        knoxsso_topology_template_content = get_config("knoxsso-topology")
        # NOTE(review): the fetched content is only used as an existence
        # check; the rendered template comes from params — confirm intended.
        if knoxsso_topology_template_content:
            File(os.path.join(params.knox_conf_dir, "topologies", "knoxsso.xml"),
                 group=params.knox_group,
                 owner=params.knox_user,
                 content=InlineTemplate(params.knoxsso_topology_template))

    if params.security_enabled:
        TemplateConfig(os.path.join(params.knox_conf_dir, "krb5JAASLogin.conf"),
                       owner=params.knox_user,
                       template_tag=None)

    # First run only: create the master secret ({...!p} masks it in logs)
    # and the gateway certificate for this host.
    if not os.path.isfile(params.knox_master_secret_path):
        cmd = format('cmd /C {knox_client_bin} create-master --master {knox_master_secret!p}')
        Execute(cmd)
        cmd = format('cmd /C {knox_client_bin} create-cert --hostname {knox_host_name_in_cluster}')
        Execute(cmd)
def webhcat():
    """Write webhcat-site.xml and re-point the WebHCat Windows service
    logon at the WebHCat user.
    """
    import params

    webhcat_site = params.config['configurations']['webhcat-site']
    XmlConfig("webhcat-site.xml",
              conf_dir=params.hcat_config_dir,
              configurations=webhcat_site)

    # Manually overriding service logon user & password set by the installation package
    ServiceConfig(params.webhcat_server_win_service_name,
                  action="change_user",
                  username=params.webhcat_user,
                  password=Script.get_password(params.webhcat_user))
def ldap():
    """Re-point the Knox demo LDAP Windows service logon at the Knox user,
    then run the shared LDAP setup.
    """
    import params

    # Manually overriding service logon user & password set by the installation package
    ldap_service = params.knox_ldap_win_service_name
    ServiceConfig(ldap_service,
                  action="change_user",
                  username=params.knox_user,
                  password=Script.get_password(params.knox_user))

    _ldap_common()
def hive(name=None):
    """Configure Hive on Windows.

    Always renders hive-site.xml. For 'hiveserver2'/'metastore' also switches
    the Windows service logon to the Hive user and ensures the warehouse dir
    exists in HDFS. For 'metastore' it initializes the schema via schematool
    when the existing schema check fails; for 'hiveserver2' on Tez it runs
    the Tez HDFS setup script.
    """
    import params

    XmlConfig("hive-site.xml",
              conf_dir=params.hive_conf_dir,
              configurations=params.config['configurations']['hive-site'],
              owner=params.hive_user,
              configuration_attributes=params.config['configuration_attributes']['hive-site']
              )

    if name in ["hiveserver2", "metastore"]:
        # Manually overriding service logon user & password set by the installation package
        service_name = params.service_map[name]
        ServiceConfig(service_name,
                      action="change_user",
                      username=params.hive_user,
                      password=Script.get_password(params.hive_user))
        # Make sure the warehouse root exists in HDFS before the service starts.
        Execute(format("cmd /c hadoop fs -mkdir -p {hive_warehouse_dir}"),
                logoutput=True,
                user=params.hadoop_user)

    if name == 'metastore':
        if params.init_metastore_schema:
            # {...!p} masks the password in logged command lines.
            check_schema_created_cmd = format('cmd /c "{hive_bin}\\hive.cmd --service schematool -info '
                                              '-dbType {hive_metastore_db_type} '
                                              '-userName {hive_metastore_user_name} '
                                              '-passWord {hive_metastore_user_passwd!p}'
                                              '&set EXITCODE=%ERRORLEVEL%&exit /B %EXITCODE%"',  # cmd "feature", propagate the process exit code manually
                                              hive_bin=params.hive_bin,
                                              hive_metastore_db_type=params.hive_metastore_db_type,
                                              hive_metastore_user_name=params.hive_metastore_user_name,
                                              hive_metastore_user_passwd=params.hive_metastore_user_passwd)
            try:
                Execute(check_schema_created_cmd)
            except Fail:
                # Schema missing or unreadable — initialize it from scratch.
                create_schema_cmd = format('cmd /c {hive_bin}\\hive.cmd --service schematool -initSchema '
                                           '-dbType {hive_metastore_db_type} '
                                           '-userName {hive_metastore_user_name} '
                                           '-passWord {hive_metastore_user_passwd!p}',
                                           hive_bin=params.hive_bin,
                                           hive_metastore_db_type=params.hive_metastore_db_type,
                                           hive_metastore_user_name=params.hive_metastore_user_name,
                                           hive_metastore_user_passwd=params.hive_metastore_user_passwd)
                Execute(create_schema_cmd,
                        user=params.hive_user,
                        logoutput=True)

    if name == "hiveserver2":
        if params.hive_execution_engine == "tez":
            # Init the tez app dir in hadoop
            script_file = __file__.replace('/', os.sep)
            cmd_file = os.path.normpath(os.path.join(os.path.dirname(script_file), "..",
                                                     "files", "hiveTezSetup.cmd"))
            Execute("cmd /c " + cmd_file, logoutput=True, user=params.hadoop_user)
def oozie(is_server=False):
    """Configure Oozie on Windows: oozie-site.xml, oozie-env.cmd and the tmp
    dir; for the server also switch the service logon user and stage the SQL
    Server JDBC driver into every location Oozie loads jars from.

    Args:
        is_server: True when configuring the Oozie server component.
    """
    import params
    from status_params import oozie_server_win_service_name

    XmlConfig("oozie-site.xml",
              conf_dir=params.oozie_conf_dir,
              configurations=params.config['configurations']['oozie-site'],
              owner=params.oozie_user,
              mode='f',
              configuration_attributes=params.config['configuration_attributes']['oozie-site'])

    File(os.path.join(params.oozie_conf_dir, "oozie-env.cmd"),
         owner=params.oozie_user,
         content=InlineTemplate(params.oozie_env_cmd_template))

    # CONSISTENCY FIX: `recursive` is the deprecated name of this argument;
    # every other Directory(...) in this file uses `create_parents`.
    Directory(params.oozie_tmp_dir,
              owner=params.oozie_user,
              create_parents=True,
              )

    if is_server:
        # Manually overriding service logon user & password set by the installation package
        ServiceConfig(oozie_server_win_service_name,
                      action="change_user",
                      username=params.oozie_user,
                      password=Script.get_password(params.oozie_user))

        # The driver is always fetched from the same JDK-distribution URL;
        # compute it once instead of rebuilding it per destination.
        sqljdbc_url = os.path.join(params.config['hostLevelParams']['jdk_location'],
                                   "sqljdbc4.jar")

        download_file(sqljdbc_url,
                      os.path.join(params.oozie_root, "extra_libs", "sqljdbc4.jar"))

        webapps_sqljdbc_path = os.path.join(params.oozie_home, "oozie-server", "webapps",
                                            "oozie", "WEB-INF", "lib", "sqljdbc4.jar")
        # The exploded webapp only exists after the server has run once.
        if os.path.isfile(webapps_sqljdbc_path):
            download_file(sqljdbc_url, webapps_sqljdbc_path)

        download_file(sqljdbc_url,
                      os.path.join(params.oozie_home, "share", "lib", "oozie", "sqljdbc4.jar"))

        download_file(sqljdbc_url,
                      os.path.join(params.oozie_home, "temp", "WEB-INF", "lib", "sqljdbc4.jar"))
def storm(name=None):
    """Write storm.yaml and, for a known Storm component, switch its Windows
    service logon to the Storm user.

    Args:
        name: component name; when it is a key of params.service_map the
            matching Windows service has its logon user/password changed.
    """
    import params

    yaml_config("storm.yaml",
                conf_dir=params.conf_dir,
                configurations=params.config['configurations']['storm-site'],
                owner=params.storm_user)

    # IDIOM FIX: dict.has_key() is deprecated and removed in Python 3;
    # the `in` operator is equivalent and portable.
    if name in params.service_map:
        service_name = params.service_map[name]
        ServiceConfig(service_name,
                      action="change_user",
                      username=params.storm_user,
                      password=Script.get_password(params.storm_user))
def hbase(name=None):
    """Write hbase-site.xml and, for a known HBase component, switch its
    Windows service logon to the HBase user.

    Args:
        name: component name; when it is a key of params.service_map the
            matching Windows service has its logon user/password changed.
    """
    import params

    XmlConfig("hbase-site.xml",
              conf_dir=params.hbase_conf_dir,
              configurations=params.config['configurations']['hbase-site'],
              configuration_attributes=params.config['configuration_attributes']['hbase-site']
              )

    # IDIOM FIX: dict.has_key() is deprecated and removed in Python 3;
    # the `in` operator is equivalent and portable.
    if name in params.service_map:
        # Manually overriding service logon user & password set by the installation package
        service_name = params.service_map[name]
        ServiceConfig(service_name,
                      action="change_user",
                      username=params.hbase_user,
                      password=Script.get_password(params.hbase_user))
def hdfs(component=None):
    """Configure HDFS on Windows.

    For 'namenode' creates the name dirs and writes the exclude (and optional
    include) host lists. For any component present in params.service_map the
    Windows service logon is switched to the HDFS user. Always renders
    hdfs-site.xml, plus hadoop-policy.xml when that config is present.
    """
    import params

    if component == "namenode":
        directories = params.dfs_name_dir.split(",")
        Directory(directories,
                  owner=params.hdfs_user,
                  mode="(OI)(CI)F",
                  create_parents=True)
        File(params.exclude_file_path,
             content=Template("exclude_hosts_list.j2"),
             owner=params.hdfs_user,
             mode="f",
             )
        if params.hdfs_include_file:
            File(params.include_file_path,
                 content=Template("include_hosts_list.j2"),
                 owner=params.hdfs_user,
                 mode="f",
                 )

    # IDIOM FIX: dict.has_key() is deprecated and removed in Python 3;
    # the `in` operator is equivalent and portable. (Also dropped a stray
    # no-op `pass` the original carried after the include-file block.)
    if component in params.service_map:
        service_name = params.service_map[component]
        ServiceConfig(service_name,
                      action="change_user",
                      username=params.hdfs_user,
                      password=Script.get_password(params.hdfs_user))

    # hadoop-policy is optional in the stack configs; only render when present.
    if "hadoop-policy" in params.config['configurations']:
        XmlConfig("hadoop-policy.xml",
                  conf_dir=params.hadoop_conf_dir,
                  configurations=params.config['configurations']['hadoop-policy'],
                  owner=params.hdfs_user,
                  mode="f",
                  configuration_attributes=params.config['configuration_attributes']['hadoop-policy'])

    XmlConfig("hdfs-site.xml",
              conf_dir=params.hadoop_conf_dir,
              configurations=params.config['configurations']['hdfs-site'],
              owner=params.hdfs_user,
              mode="f",
              configuration_attributes=params.config['configuration_attributes']['hdfs-site'])
def ams(name=None):
    """Configure Ambari Metrics System components on Windows.

    'collector': registers the collector service on first run, writes its
    conf dirs, ams-site.xml, the (security-merged) hbase-site.xml, log4j and
    env files, switches service logons, renders HDFS/core-site configs when
    the root dir is not on the local FS, and symlinks the sink jar.
    'monitor': registers the monitor service on first run, symlinks the sink
    jar into Hadoop/HBase, and writes the monitor ini/conf files.
    """
    import params

    if name == 'collector':
        # First run only: let the collector register itself as a Windows service.
        if not check_windows_service_exists(params.ams_collector_win_service_name):
            Execute(format("cmd /C cd {ams_collector_home_dir} & ambari-metrics-collector.cmd setup"))

        Directory(params.ams_collector_conf_dir,
                  owner=params.ams_user,
                  create_parents=True
                  )

        Directory(params.ams_checkpoint_dir,
                  owner=params.ams_user,
                  create_parents=True
                  )

        XmlConfig("ams-site.xml",
                  conf_dir=params.ams_collector_conf_dir,
                  configurations=params.config['configurations']['ams-site'],
                  configuration_attributes=params.config['configuration_attributes']['ams-site'],
                  owner=params.ams_user,
                  )

        # Security overrides are layered on top of the base ams-hbase-site.
        merged_ams_hbase_site = {}
        merged_ams_hbase_site.update(params.config['configurations']['ams-hbase-site'])
        if params.security_enabled:
            merged_ams_hbase_site.update(params.config['configurations']['ams-hbase-security-site'])

        XmlConfig("hbase-site.xml",
                  conf_dir=params.ams_collector_conf_dir,
                  configurations=merged_ams_hbase_site,
                  configuration_attributes=params.config['configuration_attributes']['ams-hbase-site'],
                  owner=params.ams_user,
                  )

        if (params.log4j_props != None):
            File(os.path.join(params.ams_collector_conf_dir, "log4j.properties"),
                 owner=params.ams_user,
                 content=params.log4j_props
                 )

        File(os.path.join(params.ams_collector_conf_dir, "ams-env.cmd"),
             owner=params.ams_user,
             content=InlineTemplate(params.ams_env_sh_template)
             )

        ServiceConfig(params.ams_collector_win_service_name,
                      action="change_user",
                      username=params.ams_user,
                      password=Script.get_password(params.ams_user))

        if not params.is_local_fs_rootdir:
            # Configuration needed to support NN HA
            XmlConfig("hdfs-site.xml",
                      conf_dir=params.ams_collector_conf_dir,
                      configurations=params.config['configurations']['hdfs-site'],
                      configuration_attributes=params.config['configuration_attributes']['hdfs-site'],
                      owner=params.ams_user,
                      group=params.user_group,
                      mode=0644
                      )
            XmlConfig("hdfs-site.xml",
                      conf_dir=params.hbase_conf_dir,
                      configurations=params.config['configurations']['hdfs-site'],
                      configuration_attributes=params.config['configuration_attributes']['hdfs-site'],
                      owner=params.ams_user,
                      group=params.user_group,
                      mode=0644
                      )
            XmlConfig("core-site.xml",
                      conf_dir=params.ams_collector_conf_dir,
                      configurations=params.config['configurations']['core-site'],
                      configuration_attributes=params.config['configuration_attributes']['core-site'],
                      owner=params.ams_user,
                      group=params.user_group,
                      mode=0644
                      )
            XmlConfig("core-site.xml",
                      conf_dir=params.hbase_conf_dir,
                      configurations=params.config['configurations']['core-site'],
                      configuration_attributes=params.config['configuration_attributes']['core-site'],
                      owner=params.ams_user,
                      group=params.user_group,
                      mode=0644
                      )
        else:
            # Embedded-HBase mode: only the embedded service logon needs changing.
            ServiceConfig(params.ams_embedded_hbase_win_service_name,
                          action="change_user",
                          username=params.ams_user,
                          password=Script.get_password(params.ams_user))

        # creating symbolic links on ams jars to make them available to services
        links_pairs = [
            ("%COLLECTOR_HOME%\\hbase\\lib\\ambari-metrics-hadoop-sink-with-common.jar",
             "%SINK_HOME%\\hadoop-sink\\ambari-metrics-hadoop-sink-with-common-*.jar"),
        ]

        for link_pair in links_pairs:
            link, target = link_pair
            real_link = os.path.expandvars(link)
            # Resolve the versioned jar via glob; exactly one match is expected.
            target = compress_backslashes(glob.glob(os.path.expandvars(target))[0])
            if not os.path.exists(real_link):
                #TODO check the symlink destination too. Broken in Python 2.x on Windows.
                Execute('cmd /c mklink "{0}" "{1}"'.format(real_link, target))
        pass

    elif name == 'monitor':
        # First run only: let the monitor register itself as a Windows service.
        if not check_windows_service_exists(params.ams_monitor_win_service_name):
            Execute(format("cmd /C cd {ams_monitor_home_dir} & ambari-metrics-monitor.cmd setup"))

        # creating symbolic links on ams jars to make them available to services
        links_pairs = [
            ("%HADOOP_HOME%\\share\\hadoop\\common\\lib\\ambari-metrics-hadoop-sink-with-common.jar",
             "%SINK_HOME%\\hadoop-sink\\ambari-metrics-hadoop-sink-with-common-*.jar"),
            ("%HBASE_HOME%\\lib\\ambari-metrics-hadoop-sink-with-common.jar",
             "%SINK_HOME%\\hadoop-sink\\ambari-metrics-hadoop-sink-with-common-*.jar"),
        ]

        for link_pair in links_pairs:
            link, target = link_pair
            real_link = os.path.expandvars(link)
            target = compress_backslashes(glob.glob(os.path.expandvars(target))[0])
            if not os.path.exists(real_link):
                #TODO check the symlink destination too. Broken in Python 2.x on Windows.
                Execute('cmd /c mklink "{0}" "{1}"'.format(real_link, target))

        Directory(params.ams_monitor_conf_dir,
                  owner=params.ams_user,
                  create_parents=True
                  )

        TemplateConfig(
            os.path.join(params.ams_monitor_conf_dir, "metric_monitor.ini"),
            owner=params.ams_user,
            template_tag=None
        )

        TemplateConfig(
            os.path.join(params.ams_monitor_conf_dir, "metric_groups.conf"),
            owner=params.ams_user,
            template_tag=None
        )

        ServiceConfig(params.ams_monitor_win_service_name,
                      action="change_user",
                      username=params.ams_user,
                      password=Script.get_password(params.ams_user))