Example #1
def link_config(old_conf, link_conf):
  """
  Creates a config link with the following steps:
  1. Check if the old_conf location exists
  2. If it does, check if it is already a link
  3. Make a copy to /etc/[component]/conf.backup
  4. Remove the old directory and create a symlink to link_conf

  :param old_conf: the old config directory, i.e. /etc/[component]/conf
  :param link_conf: the new target for the config directory, i.e. <stack-root>/current/[component-dir]/conf
  """
  if os.path.islink(old_conf):
    # if the link exists but is wrong, then change it
    if os.path.realpath(old_conf) != link_conf:
      Link(old_conf, to = link_conf)
    else:
      Logger.debug("Skipping {0}; it is already a link".format(old_conf))
    return

  if not os.path.exists(old_conf):
    Logger.debug("Skipping {0}; it does not exist".format(old_conf))
    return

  old_parent = os.path.abspath(os.path.join(old_conf, os.pardir))

  Logger.info("Linking {0} to {1}".format(old_conf, link_conf))

  old_conf_copy = os.path.join(old_parent, "conf.backup")
  if not os.path.exists(old_conf_copy):
    Execute(("cp", "-R", "-p", old_conf, old_conf_copy), sudo=True, logoutput=True)

  shutil.rmtree(old_conf, ignore_errors=True)

  # link /etc/[component]/conf -> <stack-root>/current/[component]-client/conf
  Link(old_conf, to = link_conf)
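
The docstring above describes a four-step backup-then-relink flow built on Ambari's Link/Execute resources. As a minimal standard-library sketch of the same idea (ignoring sudo, ownership, and logging; link_config above remains the authoritative version):

import os
import shutil

def link_config_sketch(old_conf, link_conf):
    # Illustrative stdlib equivalent of the flow documented above.
    if os.path.islink(old_conf):
        # Already a link; repoint it only if it targets the wrong directory.
        if os.path.realpath(old_conf) != link_conf:
            os.unlink(old_conf)
            os.symlink(link_conf, old_conf)
        return

    if not os.path.exists(old_conf):
        return  # nothing to back up or replace

    backup = os.path.join(os.path.dirname(old_conf), "conf.backup")
    if not os.path.exists(backup):
        shutil.copytree(old_conf, backup)        # step 3: preserve the old config
    shutil.rmtree(old_conf, ignore_errors=True)  # step 4: drop the directory...
    os.symlink(link_conf, old_conf)              # ...and replace it with a symlink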
Example #2
    def install_ranger(self):
        import params
        splice_lib_dir = "/var/lib/splicemachine"
        ranger_home = format('{params.stack_root}/current/ranger-admin')
        ranger_user = params.config['configurations']['ranger-env'][
            'ranger_user']
        ranger_plugins_dir = os.path.join(
            ranger_home,
            "ews/webapp/WEB-INF/classes/ranger-plugins/splicemachine")

        Directory(ranger_plugins_dir,
                  owner=ranger_user,
                  group=ranger_user,
                  create_parents=False)

        splice_ranger_jar = self.search_file(splice_lib_dir,
                                             "splice_ranger_admin-hdp*.jar")
        db_client_jar = self.search_file(splice_lib_dir, "db-client-*.jar")

        Link(os.path.join(ranger_plugins_dir, splice_ranger_jar),
             to=os.path.join(splice_lib_dir, splice_ranger_jar))
        Link(os.path.join(ranger_plugins_dir, db_client_jar),
             to=os.path.join(splice_lib_dir, db_client_jar))

        hbase_user = params.config['configurations']['hbase-env']['hbase_user']
        hdfs_audit_dir = params.config['configurations'][
            'ranger-splicemachine-audit'][
                'xasecure.audit.destination.hdfs.dir']

        params.HdfsResource(hdfs_audit_dir,
                            type="directory",
                            action="create_on_execute",
                            owner=hbase_user)
Example #3
def setup_atlas_falcon():
    import params

    if params.has_atlas:

        if not params.host_sys_prepped:
            Package(
                params.atlas_ubuntu_plugin_package
                if OSCheck.is_ubuntu_family() else params.atlas_plugin_package,
                retry_on_repo_unavailability=params.
                agent_stack_retry_on_unavailability,
                retry_count=params.agent_stack_retry_count)

        atlas_falcon_hook_dir = os.path.join(params.atlas_home_dir, "hook",
                                             "falcon")
        if os.path.exists(atlas_falcon_hook_dir):
            Link(os.path.join(params.falcon_conf_dir, params.atlas_conf_file),
                 to=os.path.join(params.atlas_conf_dir,
                                 params.atlas_conf_file))

            Directory(params.falcon_webinf_lib,
                      owner=params.falcon_user,
                      create_parents=True)

            src_files = os.listdir(atlas_falcon_hook_dir)
            for file_name in src_files:
                atlas_falcon_hook_file_name = os.path.join(
                    atlas_falcon_hook_dir, file_name)
                falcon_lib_file_name = os.path.join(params.falcon_webinf_lib,
                                                    file_name)
                if (os.path.isfile(atlas_falcon_hook_file_name)):
                    Link(falcon_lib_file_name, to=atlas_falcon_hook_file_name)
Example #4
def select(stack_name, package, version, try_create=True, ignore_errors=False):
  """
  Selects a config version for the specified package. If this detects that
  the stack supports configuration versioning but /etc/<component>/conf is a
  directory, then it will attempt to bootstrap the conf.backup directory and change
  /etc/<component>/conf into a symlink.

  :param stack_name: the name of the stack
  :param package: the name of the package, as-used by <conf-selector-tool>
  :param version: the version number to create
  :param try_create: optional argument to attempt to create the directory before setting it
  :param ignore_errors: optional argument to ignore any error and simply log a warning
  """
  try:
    # do nothing if the stack does not support versioned configurations
    if not _valid(stack_name, package, version):
      return

    if try_create:
      create(stack_name, package, version)

    shell.checked_call(_get_cmd("set-conf-dir", package, version), logoutput=False, quiet=False, sudo=True)

    # for consistency sake, we must ensure that the /etc/<component>/conf symlink exists and
    # points to <stack-root>/current/<component>/conf - this is because some people still prefer to
    # use /etc/<component>/conf even though <stack-root> is the "future"
    package_dirs = get_package_dirs()
    if package in package_dirs:
      Logger.info("Ensuring that {0} has the correct symlink structure".format(package))

      directory_list = package_dirs[package]
      for directory_structure in directory_list:
        conf_dir = directory_structure["conf_dir"]
        current_dir = directory_structure["current_dir"]

        # if /etc/<component>/conf is missing or is not a symlink
        if not os.path.islink(conf_dir):
          # if /etc/<component>/conf is not a link and it exists, convert it to a symlink
          if os.path.exists(conf_dir):
            parent_directory = os.path.dirname(conf_dir)
            conf_backup_dir = os.path.join(parent_directory, "conf.backup")

            # create conf.backup and copy files to it (if it doesn't exist)
            Execute(("cp", "-R", "-p", conf_dir, conf_backup_dir),
              not_if = format("test -e {conf_backup_dir}"), sudo = True)

            # delete the old /etc/<component>/conf directory and link to the backup
            Directory(conf_dir, action="delete")
            Link(conf_dir, to = conf_backup_dir)
          else:
            # missing entirely
            # /etc/<component>/conf -> <stack-root>/current/<component>/conf
            Link(conf_dir, to = current_dir)

  except Exception, exception:
    if ignore_errors is True:
      Logger.warning("Could not select the directory for package {0}. Error: {1}".format(package,
        str(exception)))
    else:
      raise
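
For orientation, the symlink layout that select() converges on (per the comments above: /etc/<component>/conf pointing at <stack-root>/current/<component>/conf, with the original directory preserved in conf.backup) can be checked with a couple of stdlib calls. The component and stack-root paths below are illustrative placeholders, not values taken from the function above:

import os

conf_dir = "/etc/hadoop/conf"                           # placeholder component conf dir
versioned_conf = "/usr/hdp/current/hadoop-client/conf"  # placeholder <stack-root>/current target

if os.path.islink(conf_dir) and os.path.realpath(conf_dir) == versioned_conf:
    print("conf symlink already points at the versioned config")
elif os.path.isdir(conf_dir):
    print("conf is still a plain directory; select() will back it up and convert it to a symlink")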
Example #5
    def start(self, env, upgrade_type=None):
        import params
        env.set_params(params)
        self.configure(env)
        daemon_cmd = format('{knox_bin} start')
        populate_topology = format(
            'cd {knox_conf_dir}/topologies/; {sudo} ambari-python-wrap ./generate_template.py '
            + params.HAServers +
            ' ; {sudo} chmod 640 *.xml; {sudo} chown knox:knox *.xml')
        no_op_test = format(
            'ls {knox_pid_file} >/dev/null 2>&1 && ps -p `cat {knox_pid_file}` >/dev/null 2>&1'
        )

        setup_ranger_knox(upgrade_type=upgrade_type)
        # Update the Knox-managed symlinks in case custom locations are configured
        if os.path.islink(params.knox_managed_pid_symlink):
            Link(
                params.knox_managed_pid_symlink,
                to=params.knox_pid_dir,
            )

        if os.path.islink(params.knox_managed_logs_symlink):
            Link(
                params.knox_managed_logs_symlink,
                to=params.knox_logs_dir,
            )

        update_knox_logfolder_permissions()

        Execute(populate_topology)
        Execute(daemon_cmd,
                user=params.knox_user,
                environment={'JAVA_HOME': params.java_home},
                not_if=no_op_test)
Example #6
def setup_atlas_sqoop():
    import params

    if params.has_atlas:

        if not params.host_sys_prepped:
            Package(
                params.atlas_ubuntu_plugin_package
                if OSCheck.is_ubuntu_family() else params.atlas_plugin_package,
                retry_on_repo_unavailability=params.
                agent_stack_retry_on_unavailability,
                retry_count=params.agent_stack_retry_count)

        atlas_sqoop_hook_dir = os.path.join(params.atlas_home_dir, "hook",
                                            "sqoop")
        if os.path.exists(atlas_sqoop_hook_dir):
            Link(os.path.join(params.sqoop_conf_dir, params.atlas_conf_file),
                 to=os.path.join(params.atlas_conf_dir,
                                 params.atlas_conf_file))

            src_files = os.listdir(atlas_sqoop_hook_dir)
            for file_name in src_files:
                atlas_sqoop_hook_file_name = os.path.join(
                    atlas_sqoop_hook_dir, file_name)
                sqoop_lib_file_name = os.path.join(params.sqoop_lib, file_name)
                if (os.path.isfile(atlas_sqoop_hook_file_name)):
                    Link(sqoop_lib_file_name, to=atlas_sqoop_hook_file_name)
Example #7
def setup_symlink(kafka_managed_dir, kafka_ambari_managed_dir):
    import params
    backup_folder_path = None
    backup_folder_suffix = "_tmp"
    if kafka_ambari_managed_dir != kafka_managed_dir:
        if os.path.exists(
                kafka_managed_dir) and not os.path.islink(kafka_managed_dir):

            # Back up existing data before deleting: the config may be switched to/from the default location repeatedly, and the directory can hold relevant contents (historic logs)
            backup_folder_path = backup_dir_contents(kafka_managed_dir,
                                                     backup_folder_suffix)

            Directory(kafka_managed_dir, action="delete", create_parents=True)

        elif os.path.islink(kafka_managed_dir) and os.path.realpath(
                kafka_managed_dir) != kafka_ambari_managed_dir:
            Link(kafka_managed_dir, action="delete")

        if not os.path.islink(kafka_managed_dir):
            Link(kafka_managed_dir, to=kafka_ambari_managed_dir)

    elif os.path.islink(
            kafka_managed_dir
    ):  # If config is changed and coincides with the kafka managed dir, remove the symlink and physically create the folder
        Link(kafka_managed_dir, action="delete")

        Directory(
            kafka_managed_dir,
            mode=0755,
            cd_access='a',
            owner=params.kafka_user,
            group=params.user_group,
            create_parents=True,
            recursive_ownership=True,
        )

    if backup_folder_path:
        # Restore backed up files to current relevant dirs if needed - will be triggered only when changing to/from default path;
        for file in os.listdir(backup_folder_path):
            if os.path.isdir(os.path.join(backup_folder_path, file)):
                Execute(('cp', '-r', os.path.join(backup_folder_path,
                                                  file), kafka_managed_dir),
                        sudo=True)
                Execute(("chown", "-R", format("{kafka_user}:{user_group}"),
                         os.path.join(kafka_managed_dir, file)),
                        sudo=True)
            else:
                File(os.path.join(kafka_managed_dir, file),
                     owner=params.kafka_user,
                     content=StaticFile(os.path.join(backup_folder_path,
                                                     file)))

        # Clean up backed up folder
        Directory(backup_folder_path, action="delete", create_parents=True)
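
Condensed, the branching above decides between three cases: replace a real directory with a symlink (after backing it up), repoint a stale symlink, or, when the two paths coincide, turn the symlink back into a real directory. A bare stdlib sketch of that decision logic, without the backup/restore and ownership handling that setup_symlink performs:

import os
import shutil

def ensure_kafka_dir_sketch(managed_dir, ambari_managed_dir):
    # Illustrative only; paths are whatever the caller supplies.
    if ambari_managed_dir != managed_dir:
        if os.path.exists(managed_dir) and not os.path.islink(managed_dir):
            shutil.rmtree(managed_dir)     # real directory in the way (setup_symlink backs it up first)
        elif os.path.islink(managed_dir) and os.path.realpath(managed_dir) != ambari_managed_dir:
            os.unlink(managed_dir)         # stale symlink pointing elsewhere
        if not os.path.islink(managed_dir):
            os.symlink(ambari_managed_dir, managed_dir)
    elif os.path.islink(managed_dir):
        os.unlink(managed_dir)             # paths coincide: replace the symlink with a real directory
        os.makedirs(managed_dir, 0o755)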
Example #8
def install_snappy():
  import params
  Directory([params.so_target_dir_x86, params.so_target_dir_x64],
            create_parents = True,
  )
  Link(params.so_target_x86,
       to=params.so_src_x86,
  )
  Link(params.so_target_x64,
       to=params.so_src_x64,
  )
Example #9
 def install(self, env):
     import params
     presto_cli_path = params.presto_cli_path
     Directory(presto_cli_path,
               create_parents=True,
               owner='root',
               group='root')
     Execute('wget --no-check-certificate {0} -O {1}'.format(
         PRESTO_CLI_URL, presto_cli_path + '/presto-cli'))
     Execute('chmod +x {0}'.format(presto_cli_path + '/presto-cli'))
     Link('/usr/hdp/current/presto-client', to=params.presto_cli_path)
     Link('/usr/bin/presto-cli',
          to='/usr/hdp/current/presto-client/presto-cli')
Example #10
    def link_metrics_sink_jar(self):
        # Add storm metrics reporter JAR to storm-ui-server classpath.
        # Remove symlinks. They can be there if you are doing an upgrade from HDP < 2.2 to HDP >= 2.2
        Link(format("{storm_lib_dir}/ambari-metrics-storm-sink.jar"),
             action="delete")
        # On old HDP 2.1 versions, this symlink may also exist and break EU to newer versions
        Link("/usr/lib/storm/lib/ambari-metrics-storm-sink.jar",
             action="delete")

        Execute(format(
            "{sudo} ln -s {metric_collector_sink_jar} {storm_lib_dir}/ambari-metrics-storm-sink.jar"
        ),
                not_if=format(
                    "ls {storm_lib_dir}/ambari-metrics-storm-sink.jar"),
                only_if=format("ls {metric_collector_sink_jar}"))
Example #11
def setup_atlas_jar_symlinks(hook_name, jar_source_dir):
    """
  In HDP 2.3, 2.4, and 2.5.0.0, Sqoop and Storm still relied on the following method to setup Atlas hooks
  because the RPM for Sqoop and Storm did not bring in any dependencies.

  /usr/hdp/current/storm-*/libext/ should contain a symlink for every jar in /usr/hdp/current/atlas-server/hooks/storm/ (e.g. somejavafile.jar)
  /usr/hdp/current/sqoop-*/lib/    should contain a symlink for every jar in /usr/hdp/current/atlas-server/hooks/sqoop/ (e.g. somejavafile.jar)

  In HDP 2.5.x.y, we plan to have the Sqoop and Storm rpms have additional dependencies on some sqoop-atlas-hook and storm-atlas-hook
  rpms, respectively, that will bring in the necessary jars and create the symlinks.

  If atlas is present on this host, then link the jars from
  {stack_root}/current/{hook_name}/lib/name_version.jar -> {jar_source_dir}/name_version.jar
  @param hook_name: one of sqoop, storm
  @param jar_source_dir: directory of where the symlinks need to be created from.
  """
    import params

    if has_atlas_in_cluster():
        atlas_home_dir = os.environ['METADATA_HOME_DIR'] if 'METADATA_HOME_DIR' in os.environ \
          else format("{stack_root}/current/atlas-server")

        # Will only exist if this host contains Atlas Server
        atlas_hook_dir = os.path.join(atlas_home_dir, "hook", hook_name)
        if os.path.exists(atlas_hook_dir):
            Logger.info(
                "Atlas Server is present on this host, will symlink jars inside of %s to %s if not already done."
                % (jar_source_dir, atlas_hook_dir))

            src_files = os.listdir(atlas_hook_dir)
            for file_name in src_files:
                atlas_hook_file_name = os.path.join(atlas_hook_dir, file_name)
                source_lib_file_name = os.path.join(jar_source_dir, file_name)
                if os.path.isfile(atlas_hook_file_name):
                    Link(source_lib_file_name, to=atlas_hook_file_name)
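
The loop above only uses Link and os.path; a hedged stdlib sketch of the same jar-linking pattern (directory existence and permissions are assumed, names are placeholders):

import os

def symlink_hook_jars_sketch(jar_source_dir, atlas_hook_dir):
    # For every jar shipped in the Atlas hook directory, create a same-named
    # symlink in the component's lib/libext directory (illustrative only).
    for file_name in os.listdir(atlas_hook_dir):
        target = os.path.join(atlas_hook_dir, file_name)      # real jar provided by Atlas
        link_name = os.path.join(jar_source_dir, file_name)   # where the hook expects to find it
        if os.path.isfile(target) and not os.path.lexists(link_name):
            os.symlink(target, link_name)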
Example #12
def setup_atlas_jar_symlinks(hook_name, jar_source_dir):
    """

    @param hook_name: one of sqoop, storm, hive
    @param jar_source_dir: directory of where the symlinks need to be created from.
    """
    install_atlas_hook(hook_name)

    stack_root = '/opt'
    atlas_hook_dir = stack_root + '/atlas-' + hook_name + '-plugin'

    if os.path.exists(atlas_hook_dir):
        Logger.info(
            "Atlas Server is present on this host, will symlink jars inside of %s to %s if not already done."
            % (jar_source_dir, atlas_hook_dir))

        src_files = os.listdir(atlas_hook_dir)
        for file_name in src_files:
            atlas_hook_file_name = os.path.join(atlas_hook_dir, file_name)
            source_lib_file_name = os.path.join(jar_source_dir, file_name)
            if os.path.isfile(atlas_hook_file_name):
                Link(source_lib_file_name, to=atlas_hook_file_name)
    else:
        Logger.info("Atlas hook directory path {0} doesn't exist".format(
            atlas_hook_dir))
Example #13
def sqoop(type=None):
    import params
    Link(params.sqoop_lib + "/mysql-connector-java.jar",
         to='/usr/share/java/mysql-connector-java.jar')

    jdbc_connector()

    Directory(params.sqoop_conf_dir,
              owner=params.sqoop_user,
              group=params.user_group,
              create_parents=True)

    configs = {}
    configs.update(params.config['configurations']['sqoop-site'])

    XmlConfig(
        "sqoop-site.xml",
        conf_dir=params.sqoop_conf_dir,
        configurations=configs,
        configuration_attributes=params.config['configuration_attributes']
        ['sqoop-site'],
        owner=params.sqoop_user,
        group=params.user_group)

    setup_atlas_sqoop()

    File(format("{sqoop_conf_dir}/sqoop-env.sh"),
         owner=params.sqoop_user,
         group=params.user_group,
         content=InlineTemplate(params.sqoop_env_sh_template))
    update_config_permissions(
        ["sqoop-env-template.sh", "sqoop-site-template.xml", "sqoop-site.xml"])
    pass
Example #14
  def _create_config_links_if_necessary(self, stack_id, stack_version):
    """
    Sets up the required structure for /etc/<component>/conf symlinks and <stack-root>/current
    configuration symlinks IFF the current stack is < HDP 2.3+ and the new stack is >= HDP 2.3

    stack_id:  stack id, ie HDP-2.3
    stack_version:  version to set, ie 2.3.0.0-1234
    """
    if stack_id is None:
      Logger.info("Cannot create config links when stack_id is not defined")
      return

    args = stack_id.upper().split('-')
    if len(args) != 2:
      Logger.info("Unrecognized stack id {0}, cannot create config links".format(stack_id))
      return

    target_stack_version = args[1]
    if not (target_stack_version and check_stack_feature(StackFeature.CONFIG_VERSIONING, target_stack_version)):
      Logger.info("Configuration symlinks are not needed for {0}".format(stack_version))
      return

    # After upgrading hdf-select package from HDF-2.X to HDF-3.Y, we need to create this symlink
    if self.stack_name.upper() == "HDF" \
            and not os.path.exists("/usr/bin/conf-select") and os.path.exists("/usr/bin/hdfconf-select"):
      Link("/usr/bin/conf-select", to = "/usr/bin/hdfconf-select")

    for package_name, directories in conf_select.get_package_dirs().iteritems():
      conf_selector_name = stack_tools.get_stack_tool_name(stack_tools.CONF_SELECTOR_NAME)
      Logger.info("The current cluster stack of {0} does not require backing up configurations; "
                  "only {1} versioned config directories will be created.".format(stack_version, conf_selector_name))
      # only link configs for all known packages
      conf_select.select(self.stack_name, package_name, stack_version, ignore_errors = True)
Example #15
def setup_atlas_storm():
    import params

    if params.has_atlas:

        if not params.host_sys_prepped:
            Package(
                params.atlas_ubuntu_plugin_package
                if OSCheck.is_ubuntu_family() else params.atlas_plugin_package,
                retry_on_repo_unavailability=params.
                agent_stack_retry_on_unavailability,
                retry_count=params.agent_stack_retry_count)

        PropertiesFile(format('{conf_dir}/{atlas_conf_file}'),
                       properties=params.atlas_props,
                       owner=params.storm_user,
                       group=params.user_group,
                       mode=0644)

        atlas_storm_hook_dir = os.path.join(params.atlas_home_dir, "hook",
                                            "storm")
        if os.path.exists(atlas_storm_hook_dir):
            storm_extlib_dir = os.path.join(params.storm_component_home_dir,
                                            "extlib")
            if os.path.exists(storm_extlib_dir):
                src_files = os.listdir(atlas_storm_hook_dir)
                for file_name in src_files:
                    atlas_storm_hook_file_name = os.path.join(
                        atlas_storm_hook_dir, file_name)
                    storm_lib_file_name = os.path.join(storm_extlib_dir,
                                                       file_name)
                    if (os.path.isfile(atlas_storm_hook_file_name)):
                        Link(storm_lib_file_name,
                             to=atlas_storm_hook_file_name)
Example #16
    def start(self, env, upgrade_type=None):
        import params
        env.set_params(params)
        self.configure(env)
        daemon_cmd = format('{knox_bin} start')
        no_op_test = format(
            'ls {knox_pid_file} >/dev/null 2>&1 && ps -p `cat {knox_pid_file}` >/dev/null 2>&1'
        )
        setup_ranger_knox(upgrade_type=upgrade_type)
        # Update the Knox-managed PID symlink in case a custom location is configured
        if os.path.islink(params.knox_managed_pid_symlink):
            Link(
                params.knox_managed_pid_symlink,
                to=params.knox_pid_dir,
            )

        update_knox_logfolder_permissions()

        try:
            Execute(daemon_cmd,
                    user=params.knox_user,
                    environment={'JAVA_HOME': params.java_home},
                    not_if=no_op_test)
        except:
            show_logs(params.knox_logs_dir, params.knox_user)
            raise
Example #17
def sqoop(type=None):
    import params
    Link(params.sqoop_lib + "/mysql-connector-java.jar",
         to='/usr/share/java/mysql-connector-java.jar')

    jdbc_connector()

    Directory(params.sqoop_conf_dir,
              owner=params.sqoop_user,
              group=params.user_group,
              create_parents=True)

    configs = {}
    configs.update(params.config['configurations']['sqoop-site'])

    XmlConfig(
        "sqoop-site.xml",
        conf_dir=params.sqoop_conf_dir,
        configurations=configs,
        configuration_attributes=params.config['configuration_attributes']
        ['sqoop-site'],
        owner=params.sqoop_user,
        group=params.user_group)

    if params.has_atlas:
        atlas_sqoop_hook_dir = os.path.join(params.atlas_home_dir, "hook",
                                            "sqoop")
        if os.path.exists(atlas_sqoop_hook_dir):
            Link(os.path.join(params.sqoop_conf_dir, params.atlas_conf_file),
                 to=os.path.join(params.atlas_conf_dir,
                                 params.atlas_conf_file))

            src_files = os.listdir(atlas_sqoop_hook_dir)
            for file_name in src_files:
                atlas_sqoop_hook_file_name = os.path.join(
                    atlas_sqoop_hook_dir, file_name)
                sqoop_lib_file_name = os.path.join(params.sqoop_lib, file_name)
                if (os.path.isfile(atlas_sqoop_hook_file_name)):
                    Link(sqoop_lib_file_name, to=atlas_sqoop_hook_file_name)

    File(format("{sqoop_conf_dir}/sqoop-env.sh"),
         owner=params.sqoop_user,
         group=params.user_group,
         content=InlineTemplate(params.sqoop_env_sh_template))
    update_config_permissions(
        ["sqoop-env-template.sh", "sqoop-site-template.xml", "sqoop-site.xml"])
    pass
Example #18
 def test_action_delete(self, exists_mock, unlink_mock):     
   exists_mock.return_value = True
   
   with Environment('/') as env:
     Link("/some_path",
          action = "delete"
     )    
   unlink_mock.assert_called_with("/some_path")
Example #19
    def test_action_create_symlink_clean_create(self, symlink_mock,
                                                lexists_mock):
        lexists_mock.return_value = False

        with Environment('/') as env:
            Link("/some_path", to="/a/b/link_to_path")

        symlink_mock.assert_called_with("/a/b/link_to_path", "/some_path")
Example #20
    def test_action_create_hardlink_clean_create(self, link_mock, lexists_mock,
                                                 exists_mock, isdir_mock):
        lexists_mock.return_value = False
        exists_mock.return_value = True
        isdir_mock.return_value = False

        with Environment('/') as env:
            Link("/some_path", hard=True, to="/a/b/link_to_path")

        link_mock.assert_called_with("/a/b/link_to_path", "/some_path")
Example #21
    def test_action_create_relink(self, symlink_mock, unlink_mock, islink_mock,
                                  lexists_mock, realmock):
        lexists_mock.return_value = True
        realmock.return_value = "/old_to_link_path"
        islink_mock.return_value = True
        with Environment('/') as env:
            Link("/some_path", to="/a/b/link_to_path")

        unlink_mock.assert_called_with("/some_path")
        symlink_mock.assert_called_with("/a/b/link_to_path", "/some_path")
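
Taken together, the symlink tests above (clean create and relink) pin down the behavior of Link without hard=True: create the symlink when nothing is at the path, and unlink-then-recreate it when an existing link resolves elsewhere. A rough stdlib approximation of what these tests assert, not the resource's actual implementation:

import os

def apply_symlink_sketch(path, to):
    # Mirrors the mocked calls checked above: os.path.lexists/islink/realpath,
    # os.unlink for a stale link, then os.symlink(to, path).
    if os.path.lexists(path):
        if not os.path.islink(path):
            raise RuntimeError("%s exists and is not a symlink" % path)
        if os.path.realpath(path) == to:
            return                 # already points at the requested target
        os.unlink(path)            # relink: drop the old symlink first
    os.symlink(to, path)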
Example #22
 def install(self, env):
     from params import presto_server_path
     presto_etc_path = presto_server_path + '/etc'
     presto_catalog_path = presto_etc_path + '/catalog'
     Execute('wget --no-check-certificate {0}  -O /tmp/{1}'.format(PRESTO_TAR_URL, 'presto-server.tar.gz'))
     Directory([presto_server_path, presto_etc_path, presto_catalog_path], create_parents=True, owner='root', group='root')
     Execute('tar xf {0} -C {1} --strip-components 1'.format('/tmp/presto-server.tar.gz', presto_server_path))
     Link('/usr/hdp/current/presto-server', to=presto_server_path)
     self.install_packages(env)
     self.configure(env)
Example #23
    def test_action_create_hardlink_target_doesnt_exist(
            self, lexists_mock, exists_mock):
        lexists_mock.return_value = False
        exists_mock.return_value = False

        with Environment('/') as env:
            try:
                Link("/some_path", hard=True, to="/a/b/link_to_path")
                self.fail("Must fail when target directory do doenst exist")
            except Fail as e:
                self.assertEqual(
                    'Failed to apply Link[\'/some_path\'], linking to nonexistent location /a/b/link_to_path',
                    str(e))
Example #24
    def link_metrics_sink_jar(self):
        import params
        # Add storm metrics reporter JAR to storm-ui-server classpath.
        # Remove symlinks. They can be there if you are doing an upgrade from HDP < 2.2 to HDP >= 2.2
        Link(format("{storm_lib_dir}/ambari-metrics-storm-sink.jar"),
             action="delete")
        # On old HDP 2.1 versions, this symlink may also exist and break EU to newer versions
        Link("/usr/lib/storm/lib/ambari-metrics-storm-sink.jar",
             action="delete")

        if check_stack_feature(StackFeature.STORM_METRICS_APACHE_CLASSES,
                               params.version_for_stack_feature_checks):
            sink_jar = params.metric_collector_sink_jar
        else:
            sink_jar = params.metric_collector_legacy_sink_jar

        Execute(format(
            "{sudo} ln -s {sink_jar} {storm_lib_dir}/ambari-metrics-storm-sink.jar"
        ),
                not_if=format(
                    "ls {storm_lib_dir}/ambari-metrics-storm-sink.jar"),
                only_if=format("ls {sink_jar}"))
Example #25
    def test_action_create_hardlink_target_is_dir(self, lexists_mock,
                                                  exists_mock, isdir_mock):
        lexists_mock.return_value = False
        exists_mock.return_value = True
        isdir_mock.return_value = True

        with Environment('/') as env:
            try:
                Link("/some_path", hard=True, to="/a/b/link_to_path")
                self.fail("Must fail when hardlinking to directory")
            except Fail as e:
                self.assertEqual(
                    'Failed to apply Link[\'/some_path\'], cannot create hard link to a directory (/a/b/link_to_path)',
                    str(e))
Example #26
def registry(env, upgrade_type=None):
    import params
    ensure_base_directories()

    File(format("{conf_dir}/registry-env.sh"),
         owner=params.registry_user,
         content=InlineTemplate(params.registry_env_sh_template))

    File(params.bootstrap_storage_command,
         owner=params.registry_user,
         content=InlineTemplate(params.bootstrap_sh_template),
         mode=0755)

    if params.security_enabled:
        if params.registry_jaas_conf_template:
            File(format("{conf_dir}/registry_jaas.conf"),
                 owner=params.registry_user,
                 content=InlineTemplate(params.registry_jaas_conf_template))
        else:
            TemplateConfig(format("{conf_dir}/registry_jaas.conf"),
                           owner=params.registry_user)
    # On some OS this folder could be not exists, so we will create it before pushing there files
    Directory(params.limits_conf_dir,
              create_parents=True,
              owner='root',
              group='root')

    Directory(
        [params.jar_storage],
        owner=params.registry_user,
        group=params.user_group,
        create_parents=True,
        cd_access="a",
        mode=0755,
    )

    File(os.path.join(params.limits_conf_dir, 'registry.conf'),
         owner='root',
         group='root',
         mode=0644,
         content=Template("registry.conf.j2"))

    File(format("{conf_dir}/registry.yaml"),
         content=Template("registry.yaml.j2"),
         owner=params.registry_user,
         group=params.user_group,
         mode=0644)

    if not os.path.islink(params.registry_managed_log_dir):
        Link(params.registry_managed_log_dir, to=params.registry_log_dir)
Example #27
 def test_action_create_failed_due_to_file_exists(self, 
                        lexists_mock, realmock):
   lexists_mock.side_effect = [True, False]
   realmock.return_value = "/old_to_link_path"
   with Environment('/') as env:
     try:
       Link("/some_path",
            to = "/a/b/link_to_path"
       )
       
       self.fail("Must fail when directory or file with name /some_path exist")
     except Fail as e:
       self.assertEqual("Link['/some_path'] trying to create a symlink with the same name as an existing file or directory",
                      str(e))
Example #28
def sqoop(type=None):
    import params
    Link(params.sqoop_lib + "/mysql-connector-java.jar",
         to='/usr/share/java/mysql-connector-java.jar')
    Directory(params.sqoop_conf_dir,
              owner=params.sqoop_user,
              group=params.user_group,
              recursive=True)
    File(format("{sqoop_conf_dir}/sqoop-env.sh"),
         owner=params.sqoop_user,
         group=params.user_group,
         content=InlineTemplate(params.sqoop_env_sh_template))
    update_config_permissions(
        ["sqoop-env-template.sh", "sqoop-site-template.xml", "sqoop-site.xml"])
    pass
Example #29
def select(stack_name, package, version, try_create=True, ignore_errors=False):
  """
  Selects a config version for the specified package.
  :param stack_name: the name of the stack
  :param package: the name of the package, as-used by conf-select
  :param version: the version number to create
  :param try_create: optional argument to attempt to create the directory before setting it
  :param ignore_errors: optional argument to ignore any error and simply log a warning
  """
  try:
    if not _valid(stack_name, package, version):
      return

    if try_create:
      create(stack_name, package, version)

    shell.checked_call(get_cmd("set-conf-dir", package, version), logoutput=False, quiet=False, sudo=True)

    # for consistency sake, we must ensure that the /etc/<component>/conf symlink exists and
    # points to /usr/hdp/current/<component>/conf - this is because some people still prefer to
    # use /etc/<component>/conf even though /usr/hdp is the "future"
    if package in PACKAGE_DIRS:
      Logger.info("Ensuring that {0} has the correct symlink structure".format(package))

      directory_list = PACKAGE_DIRS[package]
      for directory_structure in directory_list:
        conf_dir = directory_structure["conf_dir"]
        current_dir = directory_structure["current_dir"]

        # if /etc/<component>/conf is not a symlink, we need to change it
        if not os.path.islink(conf_dir):
          # if it exists, try to back it up
          if os.path.exists(conf_dir):
            parent_directory = os.path.dirname(conf_dir)
            conf_install_dir = os.path.join(parent_directory, "conf.backup")

            Execute(("cp", "-R", "-p", conf_dir, conf_install_dir),
              not_if = format("test -e {conf_install_dir}"), sudo = True)

            Directory(conf_dir, action="delete")

          Link(conf_dir, to = current_dir)
  except Exception, exception:
    if ignore_errors is True:
      Logger.warning("Could not select the directory for package {0}. Error: {1}".format(package,
        str(exception)))
    else:
      raise
Example #30
    def _relink_configurations_with_conf_select(self, stack_id, stack_version):
        """
    Sets up the required structure for /etc/<component>/conf symlinks and <stack-root>/current
    configuration symlinks IFF the current stack is < HDP 2.3+ and the new stack is >= HDP 2.3

    stack_id:  stack id, ie HDP-2.3
    stack_version:  version to set, ie 2.3.0.0-1234
    """
        if stack_id is None:
            Logger.info(
                "Cannot create config links when stack_id is not defined")
            return

        args = stack_id.upper().split('-')
        if len(args) != 2:
            Logger.info(
                "Unrecognized stack id {0}, cannot create config links".format(
                    stack_id))
            return

        target_stack_version = args[1]
        if not (target_stack_version and check_stack_feature(
                StackFeature.CONFIG_VERSIONING, target_stack_version)):
            Logger.info("Configuration symlinks are not needed for {0}".format(
                stack_version))
            return

        # After upgrading hdf-select package from HDF-2.X to HDF-3.Y, we need to create this symlink
        if self.stack_name.upper() == "HDF" \
                and not sudo.path_exists("/usr/bin/conf-select") and sudo.path_exists("/usr/bin/hdfconf-select"):
            Link("/usr/bin/conf-select", to="/usr/bin/hdfconf-select")

        restricted_packages = conf_select.get_restricted_packages()

        if 0 == len(restricted_packages):
            Logger.info(
                "There are no restricted conf-select packages for this installation"
            )
        else:
            Logger.info("Restricting conf-select packages to {0}".format(
                restricted_packages))

        for package_name, directories in conf_select.get_package_dirs(
        ).iteritems():
            if 0 == len(restricted_packages
                        ) or package_name in restricted_packages:
                conf_select.convert_conf_directories_to_symlinks(
                    package_name, stack_version, directories)