Example #1
from resource_management.libraries.functions import conf_select
from resource_management.libraries.functions.format import format
from resource_management.libraries.functions.default import default
from resource_management.libraries.functions import get_kinit_path
from resource_management.libraries.functions.get_not_managed_resources import get_not_managed_resources
from resource_management.libraries.functions.version import format_stack_version
from resource_management.libraries.resources.hdfs_resource import HdfsResource
from resource_management.libraries.script.script import Script
import status_params

# a map of the Ambari role to the component name
# for use with <stack-root>/current/<component>
SERVER_ROLE_DIRECTORY_MAP = {
    'SPARK2_JOBHISTORYSERVER': 'spark2-historyserver',
    'SPARK2_CLIENT': 'spark2-client',
    'SPARK2_THRIFTSERVER': 'spark2-thriftserver'
}

component_directory = Script.get_component_from_role(SERVER_ROLE_DIRECTORY_MAP,
                                                     "SPARK2_CLIENT")

config = Script.get_config()
tmp_dir = Script.get_tmp_dir()

stack_name = status_params.stack_name
stack_root = Script.get_stack_root()
stack_version_unformatted = config['hostLevelParams']['stack_version']
stack_version_formatted = format_stack_version(stack_version_unformatted)
host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)

# New Cluster Stack Version that is defined during the RESTART of a Stack Upgrade
version = default("/commandParams/version", None)

spark_conf = '/etc/spark2/conf'
hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
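
Each example resolves the on-disk component directory from the Ambari role of the current command. For reference, here is a minimal, self-contained sketch of how Script.get_component_from_role behaves (an assumption based on the usage above; the real method lives on the Script base class and reads the role of the running command from the command JSON):

def get_component_from_role(role_directory_map, default_role, command_role=None):
    # Fall back to default_role when the command carries no role, or a
    # role that is not in the map (e.g. during service checks).
    if command_role in role_directory_map:
        return role_directory_map[command_role]
    return role_directory_map[default_role]

print(get_component_from_role({'SPARK2_CLIENT': 'spark2-client'}, 'SPARK2_CLIENT'))
# -> spark2-client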
Example #2
from ambari_commons import OSCheck
from resource_management.libraries.functions.default import default
from resource_management.libraries.functions.version import format_stack_version
from resource_management.libraries.script.script import Script

# a map of the Ambari role to the component name
# for use with <stack-root>/current/<component>
SERVER_ROLE_DIRECTORY_MAP = {
    'HIVE_METASTORE': 'hive-metastore',
    'HIVE_SERVER': 'hive-server2',
    'WEBHCAT_SERVER': 'hive-webhcat',
    'HIVE_CLIENT': 'hive-client',
    'HCAT': 'hive-client',
    'HIVE_SERVER_INTERACTIVE': 'hive-server2-hive2'
}

# Either HIVE_METASTORE, HIVE_SERVER, WEBHCAT_SERVER, HIVE_CLIENT, HCAT, HIVE_SERVER_INTERACTIVE
role = default("/role", None)
component_directory = Script.get_component_from_role(SERVER_ROLE_DIRECTORY_MAP,
                                                     "HIVE_CLIENT")
component_directory_interactive = Script.get_component_from_role(
    SERVER_ROLE_DIRECTORY_MAP, "HIVE_SERVER_INTERACTIVE")

config = Script.get_config()

stack_root = Script.get_stack_root()
stack_version_unformatted = config['hostLevelParams']['stack_version']
stack_version_formatted_major = format_stack_version(stack_version_unformatted)

if OSCheck.is_windows_family():
    hive_metastore_win_service_name = "metastore"
    hive_client_win_service_name = "hwi"
    hive_server_win_service_name = "hiveserver2"
    webhcat_server_win_service_name = "templeton"
else:
    hive_pid_dir = config['configurations']['hive-env']['hive_pid_dir']
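
Lookups such as default("/role", None) above read a value out of the command JSON by a /-separated path, returning the fallback when the path is missing. A minimal sketch of that behavior (the real helper in resource_management.libraries.functions.default reads Script.get_config() itself rather than taking the dict as an argument):

command_json = {'hostLevelParams': {'stack_version': '2.6'}}

def default(path, default_value, config=command_json):
    # Walk the nested dict along the path; any miss yields the fallback.
    node = config
    for key in path.strip('/').split('/'):
        if not isinstance(node, dict) or key not in node:
            return default_value
        node = node[key]
    return node

print(default("/hostLevelParams/stack_version", None))  # -> 2.6
print(default("/commandParams/version", None))          # -> None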
Example #3
from resource_management.libraries.functions.default import default
from resource_management.libraries.functions.format import format
from resource_management.libraries.functions.get_stack_version import get_stack_version
from resource_management.libraries.functions.stack_features import check_stack_feature
from resource_management.libraries.functions.version import format_stack_version
from resource_management.libraries.resources.hdfs_resource import HdfsResource
from resource_management.libraries.functions.get_not_managed_resources import get_not_managed_resources
from resource_management.libraries.script.script import Script
import status_params
import functools
import os
# a map of the Ambari role to the component name
# for use with <stack-root>/current/<component>
SERVER_ROLE_DIRECTORY_MAP = {
    'HUE_SERVER': 'hue-server',
}

component_directory = Script.get_component_from_role(SERVER_ROLE_DIRECTORY_MAP,
                                                     "HUE_SERVER")
config = Script.get_config()
tmp_dir = Script.get_tmp_dir()
stack_root = Script.get_stack_root()
# Hue download url
download_url = 'echo https://www.dropbox.com/s/0rhrlnjmyw6bnfc/hue-4.2.0.tgz'
# New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
version = default("/commandParams/version", None)
stack_name = default("/hostLevelParams/stack_name", None)
# e.g. /var/lib/ambari-agent/cache/stacks/HDP/$VERSION/services/HUE/package
service_packagedir = os.path.realpath(__file__).split('/scripts')[0]
cluster_name = str(config['clusterName'])
ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]

#hue_apps = ['security','pig','filebrowser','jobbrowser','zookeeper','search','rdbms','metastore','spark','beeswax','jobsub','hbase','oozie','indexer']
hue_hdfs_module_enabled = config['configurations']['hue-env'][
Example #4
from ambari_commons.constants import AMBARI_SUDO_BINARY
from resource_management.libraries.functions.default import default
from resource_management.libraries.functions.format import format
from resource_management.libraries.script.script import Script

# a map of the Ambari role to the component name
# for use with <stack-root>/current/<component>
# (reconstructed here from the Druid conf dirs referenced below)
SERVER_ROLE_DIRECTORY_MAP = {
    'DRUID_BROKER': 'druid-broker',
    'DRUID_COORDINATOR': 'druid-coordinator',
    'DRUID_HISTORICAL': 'druid-historical',
    'DRUID_MIDDLEMANAGER': 'druid-middlemanager',
    'DRUID_OVERLORD': 'druid-overlord',
    'DRUID_ROUTER': 'druid-router'
}

# server configurations
config = Script.get_config()
stack_root = Script.get_stack_root()
tmp_dir = Script.get_tmp_dir()

stack_name = default("/clusterLevelParams/stack_name", None)

# stack version
stack_version = default("/commandParams/version", None)

# un-formatted stack version
stack_version_unformatted = str(config['clusterLevelParams']['stack_version'])

# default the role to DRUID_COORDINATOR, as needed for service checks
component_directory = Script.get_component_from_role(SERVER_ROLE_DIRECTORY_MAP, "DRUID_COORDINATOR")

hostname = config['agentLevelParams']['hostname']
sudo = AMBARI_SUDO_BINARY

# default druid parameters
druid_home = format("{stack_root}/current/{component_directory}")
druid_conf_dir = format("{stack_root}/current/{component_directory}/conf")

druid_common_conf_dir = druid_conf_dir + "/_common"
druid_coordinator_conf_dir = druid_conf_dir + "/coordinator"
druid_overlord_conf_dir = druid_conf_dir + "/overlord"
druid_broker_conf_dir = druid_conf_dir + "/broker"
druid_historical_conf_dir = druid_conf_dir + "/historical"
druid_middlemanager_conf_dir = druid_conf_dir + "/middleManager"
druid_router_conf_dir = druid_conf_dir + "/router"
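
The druid_home and druid_conf_dir assignments rely on Ambari's format(), which fills {name} placeholders from the caller's own variables rather than from explicit arguments. A rough stand-alone equivalent (the real helper in resource_management.libraries.functions.format does more, such as pulling values from keyword arguments; treat this as a sketch, not the library's implementation):

import inspect

def format(template, **kwargs):
    # Resolve {placeholders} against the calling frame's variables,
    # shadowing the builtin just as the Ambari helper does.
    caller = inspect.currentframe().f_back
    names = {**caller.f_globals, **caller.f_locals, **kwargs}
    return template.format(**names)

stack_root = '/usr/hdp'
component_directory = 'druid-coordinator'
print(format("{stack_root}/current/{component_directory}"))
# -> /usr/hdp/current/druid-coordinator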
Example #5
from resource_management.libraries.functions.default import default
from resource_management.libraries.functions.get_stack_version import get_stack_version
from resource_management.libraries.functions.stack_features import check_stack_feature
from resource_management.libraries.functions.version import format_stack_version
from resource_management.libraries.resources.hdfs_resource import HdfsResource
from resource_management.libraries.functions.get_not_managed_resources import get_not_managed_resources
from resource_management.libraries.script.script import Script
import functools
import os
# a map of the Ambari role to the component name
# for use with <stack-root>/current/<component>
SERVER_ROLE_DIRECTORY_MAP = {
    'AIRFLOW_WEBSERVER': 'airflow-webserver',
    'AIRFLOW_SCHEDULER': 'airflow-scheduler',
    'AIRFLOW_WORKER': 'airflow-worker'
}

component_directory_web = Script.get_component_from_role(
    SERVER_ROLE_DIRECTORY_MAP, "AIRFLOW_WEBSERVER")
component_directory_sched = Script.get_component_from_role(
    SERVER_ROLE_DIRECTORY_MAP, "AIRFLOW_SCHEDULER")
component_directory_work = Script.get_component_from_role(
    SERVER_ROLE_DIRECTORY_MAP, "AIRFLOW_WORKER")
config = Script.get_config()
tmp_dir = Script.get_tmp_dir()
stack_root = Script.get_stack_root()
# New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
version = default("/commandParams/version", None)
stack_name = default("/hostLevelParams/stack_name", None)
# e.g. /var/lib/ambari-agent/cache/stacks/HDP/$VERSION/services/AIRFLOW/package
service_packagedir = os.path.realpath(__file__).split('/scripts')[0]
cluster_name = str(config['clusterName'])
ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]
security_enabled = config['configurations']['cluster-env']['security_enabled']
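
All of these params.py snippets index into the same command JSON returned by Script.get_config(). A minimal illustration of the shape the lookups above assume (keys and values abbreviated and illustrative; a real command contains far more):

config = {
    'clusterName': 'c1',
    'clusterHostInfo': {'ambari_server_host': ['ambari.example.com']},
    'configurations': {'cluster-env': {'security_enabled': 'false'}},
    'commandParams': {'version': '2.6.0.0-1234'},
    'hostLevelParams': {'stack_name': 'HDP'},
}

security_enabled = config['configurations']['cluster-env']['security_enabled']
ambari_server_hostname = config['clusterHostInfo']['ambari_server_host'][0]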
Example #6
from ambari_commons import OSCheck
from resource_management.libraries.functions import format
from resource_management.libraries.functions.default import default
from resource_management.libraries.functions import get_kinit_path
from resource_management.libraries.script.script import Script

# a map of the Ambari role to the component name
# for use with /usr/hdp/current/<component>
SERVER_ROLE_DIRECTORY_MAP = {
  'HIVE_METASTORE' : 'hive-metastore',
  'HIVE_SERVER' : 'hive-server2',
  'WEBHCAT_SERVER' : 'hive-webhcat',
  'HIVE_CLIENT' : 'hive-client',
  'HCAT' : 'hive-client'
}

component_directory = Script.get_component_from_role(SERVER_ROLE_DIRECTORY_MAP, "HIVE_CLIENT")

config = Script.get_config()

if OSCheck.is_windows_family():
  hive_metastore_win_service_name = "metastore"
  hive_client_win_service_name = "hwi"
  hive_server_win_service_name = "hiveserver2"
  webhcat_server_win_service_name = "templeton"
else:
  hive_pid_dir = config['configurations']['hive-env']['hive_pid_dir']
  hive_pid = 'hive-server.pid'

  hive_metastore_pid = 'hive.pid'

  hcat_pid_dir = config['configurations']['hive-env']['hcat_pid_dir']
Example #7
import status_params
from ambari_commons.constants import AMBARI_SUDO_BINARY
from resource_management.libraries.functions.copy_tarball import get_sysprep_skip_copy_tarballs_hdfs
from resource_management.libraries.functions.version import format_stack_version, get_major_version
from resource_management.libraries.script.script import Script

SERVER_ROLE_DIRECTORY_MAP = {
    'SPARK2_JOBHISTORYSERVER': 'spark2-historyserver',
    'SPARK2_CLIENT': 'spark2-client',
    'SPARK2_THRIFTSERVER': 'spark2-thriftserver',
    'LIVY2_SERVER': 'livy2-server',
    'LIVY2_CLIENT': 'livy2-client'
}

HIVE_SERVER_ROLE_DIRECTORY_MAP = {
    'HIVE_METASTORE': 'hive-metastore',
    'HIVE_SERVER': 'hive-server2',
    'HIVE_CLIENT': 'hive-client',
    'HIVE_SERVER_INTERACTIVE': 'hive-server2'
}

component_directory = Script.get_component_from_role(SERVER_ROLE_DIRECTORY_MAP,
                                                     "SPARK2_CLIENT")
hive_component_directory = Script.get_component_from_role(
    HIVE_SERVER_ROLE_DIRECTORY_MAP, "HIVE_CLIENT")

config = Script.get_config()
tmp_dir = Script.get_tmp_dir()
sudo = AMBARI_SUDO_BINARY

stack_name = status_params.stack_name
stack_root = Script.get_stack_root()
stack_version_unformatted = config['clusterLevelParams']['stack_version']
stack_version_formatted = format_stack_version(stack_version_unformatted)
major_stack_version = get_major_version(stack_version_formatted)

sysprep_skip_copy_tarballs_hdfs = get_sysprep_skip_copy_tarballs_hdfs()
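
The resolved directories are typically joined with the stack root into <stack-root>/current/<component> paths, as the format() calls in the Druid example do. A self-contained illustration with assumed values (stack_root is commonly /usr/hdp on HDP stacks):

stack_root = '/usr/hdp'                  # assumed value
component_directory = 'spark2-client'    # result of get_component_from_role
hive_component_directory = 'hive-client'

spark_home = "{0}/current/{1}".format(stack_root, component_directory)
hive_home = "{0}/current/{1}".format(stack_root, hive_component_directory)
print(spark_home)  # -> /usr/hdp/current/spark2-client
print(hive_home)   # -> /usr/hdp/current/hive-client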