Example #1
def install_apollo():
    import os, params
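    # Create the runtime directories, then (re)install the Apollo admin
    # service only when the versioned directory or the install symlink is
    # missing: download, unpack, symlink the version into place, relocate the
    # shipped config into the managed conf dir, and fix ownership.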
    Directory(
        [params.pid_dir, params.log_dir, params.conf_dir + '/adminservice'],
        owner=params.apollo_user,
        group=params.user_group,
        mode=0755,
        create_parents=True)

    if (not os.path.exists(Script.get_stack_root() + '/' +
                           params.version_dir_adminservice)
            or not os.path.exists(params.install_dir_adminservice)):
        Execute('rm -rf %s' % Script.get_stack_root() + '/' +
                params.version_dir_adminservice)
        Execute('rm -rf %s' % params.install_dir_adminservice)
        Execute('/bin/rm -f /tmp/' + params.filename_adminservice)
        Execute('wget ' + params.download_url_adminservice + ' -O /tmp/' +
                params.filename_adminservice,
                user=params.apollo_user)
        Execute('unzip /tmp/' + params.filename_adminservice + ' -d ' +
                Script.get_stack_root())
        Execute('ln -s ' + Script.get_stack_root() + '/' +
                params.version_dir_adminservice + ' ' +
                params.install_dir_adminservice)
        Execute('cp -r ' + params.install_dir_adminservice + '/config/* ' +
                params.conf_dir + '/adminservice')
        Execute('rm -rf ' + params.install_dir_adminservice + '/config')
        Execute('ln -s ' + params.conf_dir + '/adminservice' + ' ' +
                params.install_dir_adminservice + '/config')
        Execute('chown -R %s:%s %s/%s' %
                (params.apollo_user, params.user_group,
                 Script.get_stack_root(), params.version_dir_adminservice))
        Execute('chown -R %s:%s %s' % (params.apollo_user, params.user_group,
                                       params.install_dir_adminservice))
Example #2
 def install_ranger(self):
     import os, params
     Directory([params.ranger_conf, params.admin_log_dir],
               owner=params.unix_user,
               group=params.user_group,
               mode=0775,
               create_parents=True)
     if not os.path.exists(Script.get_stack_root() + '/' +
                           params.version_dir_admin) or not os.path.exists(
                               params.install_dir_admin):
         Execute('rm -rf %s' % params.install_dir_admin)
         Execute('wget ' + params.download_url_admin + ' -O /tmp/' +
                 params.filename_admin,
                 user=params.unix_user)
         Execute('tar -zxf /tmp/' + params.filename_admin + ' -C  ' +
                 Script.get_stack_root())
         Execute('ln -s ' + Script.get_stack_root() + '/' +
                 params.version_dir_admin + ' ' + params.install_dir_admin)
         Execute(' rm -rf ' + params.install_dir_admin + '/conf')
         Execute(' rm -rf ' + params.ranger_conf + '/conf/*')
         Execute(' rm -rf ' + params.install_dir_admin +
                 '/ews/webapp/WEB-INF/classes/conf')
         Execute('ln -s ' + params.ranger_conf + ' ' +
                 params.install_dir_admin + '/conf')
         Execute('ln -s ' + params.ranger_conf + ' ' +
                 params.install_dir_admin +
                 '/ews/webapp/WEB-INF/classes/conf')
         Execute('ln -s ' + params.admin_log_dir + ' ' +
                 params.install_dir_admin + '/ews/logs')
         Execute('chown -R %s:%s %s/%s' %
                 (params.unix_user, params.user_group,
                  Script.get_stack_root(), params.version_dir_admin))
         Execute('chown -R %s:%s %s' % (params.unix_user, params.user_group,
                                        params.install_dir_admin))
         Execute('/bin/rm -f /tmp/' + params.filename_admin)
Example #3
def install_pika():
    import os, params
    Directory([
        params.pid_dir, params.log_dir, params.conf_dir, params.db_dir,
        params.dump_dir
    ],
              owner=params.pika_user,
              group=params.pika_group,
              mode=0755,
              create_parents=True)
    if not os.path.exists(Script.get_stack_root() + '/' +
                          params.version_dir) or not os.path.exists(
                              params.install_dir):
        Execute('rm -rf %s' % Script.get_stack_root() + '/' +
                params.version_dir)
        Execute('rm -rf %s' % params.install_dir)
        Execute('/bin/rm -f /tmp/' + params.filename)
        Execute('wget ' + params.download_url + ' -O /tmp/' + params.filename,
                user=params.pika_user)
        Execute('tar -jxf /tmp/' + params.filename + ' -C  ' +
                Script.get_stack_root())
        Execute('ln -s %s/output %s/%s' %
                (params.stack_root, params.stack_root, params.version_dir))
        Execute('ln -s ' + Script.get_stack_root() + '/' + params.version_dir +
                ' ' + params.install_dir)
        Execute('mkdir ' + params.install_dir + '/log && chmod 777 ' +
                params.install_dir + '/log')
        Execute("echo 'export PATH=%s/bin:$PATH'>/etc/profile.d/pika.sh" %
                params.install_dir)
        Execute('chown -R %s:%s %s/%s' %
                (params.pika_user, params.pika_group, Script.get_stack_root(),
                 params.version_dir))
        Execute('chown -R %s:%s %s' %
                (params.pika_user, params.pika_group, params.install_dir))
Example #4
def install_rocketmq():
    import os, params
    Directory([
        params.pid_dir, params.log_dir, params.conf_dir,
        params.store_commitlog, params.store_queue
    ],
              owner=params.rocketmq_user,
              group=params.user_group,
              mode=0755,
              create_parents=True)
    if not os.path.exists(Script.get_stack_root() + '/' +
                          params.version_dir) or not os.path.exists(
                              params.install_dir):
        Execute('rm -rf %s' % Script.get_stack_root() + '/' +
                params.version_dir)
        Execute('rm -rf %s' % params.install_dir)
        Execute('/bin/rm -f /tmp/' + params.filename)
        Execute('wget ' + params.download_url + ' -O /tmp/' + params.filename,
                user=params.rocketmq_user)
        Execute('tar -zxvf /tmp/' + params.filename + ' -C  ' +
                Script.get_stack_root())
        Execute('ln -s ' + Script.get_stack_root() + '/' + params.version_dir +
                ' ' + params.install_dir)

        Execute('rm -rf ' + params.install_dir + '/conf  ')
        Execute('ln -s ' + params.conf_dir + ' ' + params.install_dir +
                '/conf  ')

        Execute("echo 'export PATH=%s/bin:$PATH'>/etc/profile.d/rocketmq.sh" %
                params.install_dir)
        Execute('chown -R %s:%s %s/%s' %
                (params.rocketmq_user, params.user_group,
                 Script.get_stack_root(), params.version_dir))
        Execute('chown -R %s:%s %s' %
                (params.rocketmq_user, params.user_group, params.install_dir))
Example #5
def install_nifi_toolkit():
    import os, params
    Directory([params.toolkit_tmp_dir, params.nifi_toolkit_conf_dir],
              owner=params.nifi_user,
              group=params.nifi_group,
              mode=0775,
              create_parents=True)
    if not os.path.exists(Script.get_stack_root() + '/' +
                          params.toolkit_version_dir) or not os.path.exists(
                              params.toolkit_install_dir):
        Execute('rm -rf %s' % Script.get_stack_root() + '/' +
                params.toolkit_version_dir)
        Execute('rm -rf %s' % params.toolkit_install_dir)
        Execute('wget ' + params.toolkit_download_url + ' -O /tmp/' +
                params.toolkit_filename,
                user=params.nifi_user)
        Execute('tar -zxf /tmp/' + params.toolkit_filename + ' -C  ' +
                Script.get_stack_root())
        Execute('ln -s ' + Script.get_stack_root() + '/' +
                params.toolkit_version_dir + ' ' + params.toolkit_install_dir)

        Execute(' cp -rf ' + params.toolkit_install_dir + '/conf/*  ' +
                params.nifi_toolkit_conf_dir)
        Execute(' rm -rf ' + params.toolkit_install_dir + '/conf')
        Execute('ln -s ' + params.nifi_toolkit_conf_dir + ' ' +
                params.toolkit_install_dir + '/conf')
        Execute('chown -R %s:%s %s/%s' %
                (params.nifi_user, params.nifi_group, Script.get_stack_root(),
                 params.toolkit_version_dir))
        Execute(
            'chown -R %s:%s %s' %
            (params.nifi_user, params.nifi_group, params.toolkit_install_dir))
        Execute('/bin/rm -f /tmp/' + params.toolkit_filename)
Example #6
 def install_ranger(self):
     import os, params
     Directory([params.ranger_tagsync_conf],
               owner=params.unix_user,
               group=params.user_group,
               mode=0775,
               create_parents=True)
     if (not os.path.exists(Script.get_stack_root() + '/' +
                            params.version_dir_tagsync)
             or not os.path.exists(params.install_dir_tagsync)):
         Execute('rm -rf %s' % params.install_dir_tagsync)
         Execute('wget ' + params.download_url_tagsync + ' -O /tmp/' +
                 params.filename_tagsync,
                 user=params.unix_user)
         Execute('tar -zxf /tmp/' + params.filename_tagsync + ' -C  ' +
                 Script.get_stack_root())
         Execute('ln -s ' + Script.get_stack_root() + '/' +
                 params.version_dir_tagsync + ' ' +
                 params.install_dir_tagsync)
         Execute(' rm -rf ' + params.install_dir_tagsync + '/conf')
         Execute('ln -s ' + params.ranger_tagsync_conf + ' ' +
                 params.install_dir_tagsync + '/conf')
         Execute('ln -s ' + params.tagsync_log_dir + ' ' +
                 params.install_dir_tagsync + '/logs')
         Execute('chown -R %s:%s %s/%s' %
                 (params.unix_user, params.user_group,
                  Script.get_stack_root(), params.version_dir_tagsync))
         Execute('chown -R %s:%s %s' % (params.unix_user, params.user_group,
                                        params.install_dir_tagsync))
         Execute('/bin/rm -f /tmp/' + params.filename_tagsync)
Example #7
def install_wherehows():
    import os, params
    Directory([
        params.log_dir, params.conf_dir + '/backend',
        params.conf_dir + '/jobs', '/var/run/wherehows'
    ],
              owner=params.wherehows_user,
              group=params.user_group,
              mode=0755,
              create_parents=True)
    if not os.path.exists(Script.get_stack_root() + '/' + params.version_dir_backend
                          ) or not os.path.exists(params.install_dir_backend):
        Execute('rm -rf %s' % Script.get_stack_root() + '/' + params.version_dir_backend)
        Execute('rm -rf %s' % params.install_dir_backend)
        Execute('/bin/rm -f /tmp/' + params.filename_backend)
        Execute(
            'wget ' + params.download_url_backend + ' -O /tmp/' + params.filename_backend,
            user=params.wherehows_user)
        Execute('tar -zxf /tmp/' + params.filename_backend + ' -C  ' + Script.get_stack_root())
        Execute('ln -s ' + Script.get_stack_root() + '/' + params.version_dir_backend + ' ' +
                params.install_dir_backend)
        Execute('rm -rf %s/logs && ln -s %s %s/logs ' %
                (params.install_dir_backend, params.log_dir,
                 params.install_dir_backend))
        Execute('rm -rf %s/conf && ln -s %s %s/conf ' %
                (params.install_dir_backend, params.conf_dir,
                 params.install_dir_backend))
        Execute('chown -R %s:%s %s/%s' %
                (params.wherehows_user, params.user_group,
                 params.stack_root, params.version_dir_backend))
        Execute(
            'chown -R %s:%s %s' % (params.wherehows_user, params.user_group,
                                   params.install_dir_backend))
Example #8
def install_beacon():
    import os, params
    Directory([params.etc_prefix_dir],
              owner=params.beacon_user,
              group=params.user_group,
              mode=0755,
              create_parents=True)
    if not os.path.exists(Script.get_stack_root() + '/' + params.version_dir) or not os.path.exists(
            params.install_dir):
        Execute('rm -rf %s' % Script.get_stack_root() + '/' + params.version_dir)
        Execute('rm -rf %s' % params.install_dir)
        Execute(
            'wget ' + params.download_url + ' -O /tmp/' + params.filename,
            user=params.beacon_user)
        Execute('tar -zxf /tmp/' + params.filename + ' -C  ' + Script.get_stack_root())
        Execute('ln -s ' + Script.get_stack_root() + '/' + params.version_dir + ' ' + params.install_dir)
        Execute(' cp -r ' + params.install_dir + '/conf/* ' + params.etc_prefix_dir)
        Execute(' rm -rf ' + params.install_dir + '/conf')
        Execute('ln -s ' + params.etc_prefix_dir + ' ' + params.install_dir +
                '/conf')

        Execute('chown -R %s:%s %s/%s' %
                (params.beacon_user, params.user_group, params.stack_root, params.version_dir))
        Execute('chown -R %s:%s %s' % (params.beacon_user, params.user_group,
                                       params.install_dir))
        Execute('/bin/rm -f /tmp/' + params.filename)
Example #9
def install_dbus():
    import os, params
    Directory([params.pid_dir, params.log_dir, params.conf_dir],
              owner=params.dbus_user,
              group=params.dbus_group,
              mode=0755,
              create_parents=True)

    params.HdfsResource('/user/dbus',
                        type="directory",
                        action="create_on_execute",
                        owner=params.dbus_user,
                        mode=0755)
    params.HdfsResource('/dbus',
                        type="directory",
                        action="create_on_execute",
                        owner=params.dbus_user,
                        mode=0755)
    params.HdfsResource(None, action="execute")
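    # Passing None with action="execute" runs the HdfsResource operations
    # queued above as a single batch.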

    # copy_to_hdfs(
    #     "udf",
    #     params.user_group,
    #     params.hdfs_user,
    #     custom_src_file = '',
    #     custom_dest_file='/dbus/udf/abc.jar',
    #     replace_existing_files=True)

    if not os.path.exists(Script.get_stack_root() + '/' +
                          params.version_dir) or not os.path.exists(
                              params.install_dir):
        Execute('rm -rf %s' % Script.get_stack_root() + '/' +
                params.version_dir)
        Execute('rm -rf %s' % params.install_dir)
        Execute('/bin/rm -f /tmp/' + params.filename)
        Execute('wget ' + params.download_url + ' -O /tmp/' + params.filename,
                user=params.dbus_user)
        Execute('tar -jxf /tmp/' + params.filename + ' -C  ' +
                Script.get_stack_root())
        Execute('ln -s ' + Script.get_stack_root() + '/' + params.version_dir +
                ' ' + params.install_dir)
        Execute('cp -r ' + params.install_dir + '/conf/* ' + params.conf_dir)
        Execute('rm -rf ' + params.install_dir + '/conf')
        Execute('ln -s ' + params.conf_dir + ' ' + params.install_dir +
                '/conf')
        Execute("echo 'export PATH=%s/bin:$PATH'>/etc/profile.d/dbus.sh" %
                params.install_dir)
        Execute('chown -R %s:%s %s/%s' %
                (params.dbus_user, params.dbus_group, params.stack_root,
                 params.version_dir))
        Execute('chown -R %s:%s %s' %
                (params.dbus_user, params.dbus_group, params.install_dir))
Example #10
def install_xlearning():
    import os, params
    Directory([params.conf_dir, params.log_dir],
              owner=params.xlearning_user,
              group=params.xlearning_group,
              mode=0755,
              create_parents=True)
    params.HdfsResource('/xlearning/staging',
                        type="directory",
                        action="create_on_execute",
                        owner=params.xlearning_user,
                        mode=0755)
    params.HdfsResource('/xlearning/eventlog',
                        type="directory",
                        action="create_on_execute",
                        owner=params.xlearning_user,
                        mode=0755)
    params.HdfsResource('/xlearning/history',
                        type="directory",
                        action="create_on_execute",
                        owner=params.xlearning_user,
                        mode=0755)
    params.HdfsResource(None, action="execute")

    if not os.path.exists(Script.get_stack_root() + '/' +
                          params.version_dir) or not os.path.exists(
                              params.install_dir):
        Execute('rm -rf %s' % Script.get_stack_root() + '/' +
                params.version_dir)
        Execute('rm -rf %s' % params.install_dir)
        Execute('wget ' + params.download_url + ' -O /tmp/' + params.filename,
                user=params.xlearning_user)
        Execute('tar -zxf /tmp/' + params.filename + ' -C  ' +
                Script.get_stack_root())
        Execute('ln -s ' + Script.get_stack_root() + '/' + params.version_dir +
                ' ' + params.install_dir)
        Execute(' cp -r ' + params.install_dir + '/conf/* ' + params.conf_dir)
        Execute(' rm -rf ' + params.install_dir + '/conf')
        Execute('ln -s ' + params.conf_dir + ' ' + params.install_dir +
                '/conf')
        Execute('ln -s ' + params.log_dir + ' ' + params.install_dir +
                '/logs/xlearning')

        Execute("echo 'export PATH=%s/bin:$PATH'>/etc/profile.d/xlearning.sh" %
                params.install_dir)
        Execute('chown -R %s:%s %s/%s' %
                (params.xlearning_user, params.xlearning_group,
                 params.stack_root, params.version_dir))
        Execute('chown -R %s:%s %s' %
                (params.xlearning_user, params.xlearning_group,
                 params.install_dir))
        Execute('/bin/rm -f /tmp/' + params.filename)
Example #11
def install_wormhole():
    import os, params
    Directory([params.pid_dir, params.log_dir, params.conf_dir],
              owner=params.wormhole_user,
              group=params.wormhole_group,
              mode=0755,
              create_parents=True)
    params.HdfsResource('/user/wormhole',
                        type="directory",
                        action="create_on_execute",
                        owner=params.wormhole_user,
                        mode=0755)
    params.HdfsResource('/wormhole',
                        type="directory",
                        action="create_on_execute",
                        owner=params.wormhole_user,
                        mode=0755)
    params.HdfsResource('/wormhole/udfjars',
                        type="directory",
                        action="create_on_execute",
                        owner=params.wormhole_user,
                        mode=0755)
    params.HdfsResource(None, action="execute")

    if not os.path.exists(Script.get_stack_root() + '/' +
                          params.version_dir) or not os.path.exists(
                              params.install_dir):
        Execute('rm -rf %s' % Script.get_stack_root() + '/' +
                params.version_dir)
        Execute('rm -rf %s' % params.install_dir)
        Execute('/bin/rm -f /tmp/' + params.filename)
        Execute('wget ' + params.download_url + ' -O /tmp/' + params.filename,
                user=params.wormhole_user)
        Execute('tar -jxf /tmp/' + params.filename + ' -C  ' +
                Script.get_stack_root())
        Execute('ln -s ' + Script.get_stack_root() + '/' + params.version_dir +
                ' ' + params.install_dir)
        Execute('cp -r ' + params.install_dir + '/conf/* ' + params.conf_dir)
        Execute('rm -rf ' + params.install_dir + '/conf')
        Execute('ln -s ' + params.conf_dir + ' ' + params.install_dir +
                '/conf')
        Execute("echo 'export PATH=%s/bin:$PATH'>/etc/profile.d/wormhole.sh" %
                params.install_dir)
        Execute('chown -R %s:%s %s/%s' %
                (params.wormhole_user, params.wormhole_group,
                 Script.get_stack_root(), params.version_dir))
        Execute(
            'chown -R %s:%s %s' %
            (params.wormhole_user, params.wormhole_group, params.install_dir))
Example #12
def install_graphouse():
    import os, params
    Directory([params.graphouse_conf_dir, params.log_dir],
              owner=params.graphite_user,
              group=params.user_group,
              mode=0775,
              cd_access="a",
              create_parents=True)

    File('/tmp/init_clickhouse.sql',
         content=StaticFile("init_clickhouse.sql"),
         mode=0755)
    # TODO: execute init_clickhouse.sql
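    # One possible way to do that (a sketch, not part of the original script):
    # Execute("clickhouse-client --multiquery < /tmp/init_clickhouse.sql",
    #         user=params.graphite_user)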

    if not os.path.exists(Script.get_stack_root() + '/' +
                          params.version_dir) or not os.path.exists(
                              params.install_dir):
        Execute('rm -rf %s' % Script.get_stack_root() + '/' +
                params.version_dir)
        Execute('rm -rf %s' % params.install_dir)
        Execute('wget ' + params.download_url + ' -O /tmp/' + params.filename,
                user=params.graphite_user)
        Execute('tar -xf /tmp/' + params.filename + ' -C  ' +
                Script.get_stack_root())
        Execute('ln -s ' + Script.get_stack_root() + '/' + params.version_dir +
                ' ' + params.install_dir)

        Execute(' rm -rf ' + params.install_dir + '/conf')
        Execute('ln -s ' + params.graphouse_conf_dir + ' ' +
                params.install_dir + '/conf')

        Execute(' rm -rf ' + params.install_dir + '/log')
        Execute('ln -s ' + params.log_dir + ' ' + params.install_dir + '/log')

        Execute("echo 'export PATH=%s/bin:$PATH'>/etc/profile.d/graphouse.sh" %
                params.install_dir)
        Execute('chown -R %s:%s %s/%s' %
                (params.graphite_user, params.user_group, params.stack_root,
                 params.version_dir))
        Execute('chown -R %s:%s %s' %
                (params.graphite_user, params.user_group, params.install_dir))
        Execute('/bin/rm -f /tmp/' + params.filename)

        File('/etc/init.d/graphouse',
             content=StaticFile("graphouse.init"),
             mode=0755)
        Execute('chkconfig graphouse on')
Example #13
def install_azkaban():
    import os, params
    Directory([params.conf_dir, params.log_dir],
              owner=params.azkaban_user,
              group=params.user_group,
              mode=0755,
              create_parents=True)

    if not os.path.exists(Script.get_stack_root() + '/' +
                          params.version_dir_executor) or not os.path.exists(
                              params.install_dir_executor):
        Execute('rm -rf %s' % Script.get_stack_root() + '/' +
                params.version_dir_executor)
        Execute('rm -rf %s' % params.install_dir_executor)
        Execute('wget ' + params.download_url_executor + ' -O /tmp/' +
                params.filename_executor,
                user=params.azkaban_user)
        Execute('tar -zxf /tmp/' + params.filename_executor + ' -C  ' +
                Script.get_stack_root())
        Execute('ln -s ' + Script.get_stack_root() + '/' +
                params.version_dir_executor + ' ' +
                params.install_dir_executor)
        Execute(' cp -r ' + params.install_dir_executor + '/conf/* ' +
                params.conf_dir)
        Execute(' rm -rf ' + params.install_dir_executor + '/conf')
        Execute('ln -s ' + params.conf_dir + ' ' +
                params.install_dir_executor + '/conf')
        Execute('ln -s ' + params.log_dir + ' ' + params.install_dir_executor +
                '/logs/azkaban')

        Execute("echo 'export PATH=%s/bin:$PATH'>/etc/profile.d/azkaban.sh" %
                params.install_dir_executor)
        Execute('chown -R %s:%s %s/%s' %
                (params.azkaban_user, params.user_group,
                 Script.get_stack_root(), params.version_dir_executor))
        Execute('chown -R %s:%s %s' % (params.azkaban_user, params.user_group,
                                       params.install_dir_executor))
        Execute('/bin/rm -f /tmp/' + params.filename_executor)

    # Placed after the install block so that the executor lib directory exists.
    File(params.install_dir_executor +
         '/lib/azkaban-ldap-usermanager-1.2.1-SNAPSHOT.jar',
         content=StaticFile("azkaban-ldap-usermanager-1.2.1-SNAPSHOT.jar"),
         mode=0755)
Example #14
def install_registry():
    import os, params
    Directory(['/data1/registry', '/etc/harbor'],
              mode=0755,
              create_parents=True)

    if not os.path.exists(Script.get_stack_root() + '/' +
                          params.version_dir) or not os.path.exists(
                              params.install_dir):
        Execute('rm -rf %s' % Script.get_stack_root() + '/' +
                params.version_dir)
        Execute('rm -rf %s' % params.install_dir)
        Execute('/bin/rm -f /tmp/' + params.filename)
        Execute('wget ' + params.download_url + ' -O /tmp/' + params.filename)
        Execute('tar -jxf /tmp/' + params.filename + ' -C  ' +
                Script.get_stack_root())
        File('/etc/harbor/harbor.cfg',
             content=InlineTemplate(params.harbor_cfg_content),
             mode=0755)

        Execute(params.install_dir + '/install.sh')
Example #15
def install_nifi_registry():
    import os, params
    Directory([params.nifi_registry_config_dir],
              owner=params.nifi_user,
              group=params.nifi_group,
              mode=0775,
              create_parents=True)
    if not os.path.exists(Script.get_stack_root() + '/' +
                          params.registry_version_dir) or not os.path.exists(
                              params.registry_install_dir):
        Execute('rm -rf %s' % Script.get_stack_root() + '/' +
                params.registry_version_dir)
        Execute('rm -rf %s' % params.registry_install_dir)
        Execute('wget ' + params.registry_download_url + ' -O /tmp/' +
                params.registry_filename,
                user=params.nifi_registry_user)
        Execute('tar -zxf /tmp/' + params.registry_filename + ' -C  ' +
                Script.get_stack_root())
        Execute('ln -s ' + Script.get_stack_root() + '/' +
                params.registry_version_dir + ' ' +
                params.registry_install_dir)

        # Copy the shipped config into the managed config dir, then symlink it
        # back into the install dir.
        Execute(' cp -rf ' + params.registry_install_dir + '/conf/*  ' +
                params.nifi_registry_config_dir)
        Execute(' rm -rf ' + params.registry_install_dir + '/conf')
        Execute('ln -s ' + params.nifi_registry_config_dir + ' ' +
                params.registry_install_dir + '/conf')
        Execute('chown -R %s:%s %s/%s' %
                (params.nifi_registry_user, params.nifi_registry_group,
                 Script.get_stack_root(), params.registry_version_dir))
        Execute('chown -R %s:%s %s' %
                (params.nifi_registry_user, params.nifi_registry_group,
                 params.registry_install_dir))
        Execute('/bin/rm -f /tmp/' + params.registry_filename)
        Execute('export JAVA_HOME=' + params.jdk64_home + ';' +
                params.nifi_registry_bin_dir +
                '/nifi-registry.sh install >> ' +
                params.nifi_registry_log_file,
                user=params.nifi_registry_user)
Example #16
def install_sonar():
    import os, params
    if not os.path.exists(Script.get_stack_root() + '/' +
                          params.version_dir) or not os.path.exists(
                              params.install_dir):
        Execute('rm -rf %s' % Script.get_stack_root() + '/' +
                params.version_dir)
        Execute('rm -rf %s' % params.install_dir)
        Execute('wget ' + params.download_url + ' -O /tmp/' + params.filename,
                user='******')
        Execute('cd %s ; unzip /tmp/%s' % (params.stack_root, params.filename))
        Execute('ln -s ' + Script.get_stack_root() + '/' + params.version_dir +
                ' ' + params.install_dir)
        Execute(' mkdir -p ' + params.conf_dir + ' && cp -r ' +
                params.install_dir + '/conf/* ' + params.conf_dir)

        Execute(' rm -rf ' + params.install_dir + '/conf')
        Execute('ln -s ' + params.conf_dir + ' ' + params.install_dir +
                '/conf')

        Execute('/bin/rm -f /tmp/' + params.filename)
Example #17
def install_doris():
    import os, params
    Directory([params.pid_dir, params.log_dir],
              owner=params.doris_user,
              group=params.doris_group,
              mode=0755,
              create_parents=True)
    if not os.path.exists(Script.get_stack_root() + '/' + params.version_dir) or not os.path.exists(
            params.install_dir):
        Execute('rm -rf %s' % Script.get_stack_root() + '/' + params.version_dir)
        Execute('rm -rf %s' % params.install_dir)
        Execute('/bin/rm -f /tmp/' + params.filename)
        Execute(
            'wget ' + params.download_url + ' -O /tmp/' + params.filename,
            user=params.doris_user)
        Execute('tar -zxf /tmp/' + params.filename + ' -C  ' + Script.get_stack_root())
        Execute('ln -s ' + Script.get_stack_root() + '/' + params.version_dir + ' ' + params.install_dir)
        Execute('mkdir ' + params.install_dir + '/log && chmod 777 ' +
                params.install_dir + '/log')
        Execute('chown -R %s:%s %s/%s' %
                (params.doris_user, params.doris_group,
                 Script.get_stack_root(), params.version_dir))
        Execute('chown -R %s:%s %s' % (params.doris_user, params.doris_group,
                                       params.install_dir))
Example #18
def setup_atlas_jar_symlinks(hook_name, jar_source_dir):
    """
  In HDP 2.3, 2.4, and 2.5.0.0, Sqoop and Storm still relied on the following method to setup Atlas hooks
  because the RPM for Sqoop and Storm did not bring in any dependencies.

  /usr/hdp/current/storm-*/libext/ should contain symlinks for every jar in /usr/hdp/current/atlas-server/hooks/storm/somejavafile.jar
  /usr/hdp/current/sqoop-*/lib/    should contain symlinks for every jar in /usr/hdp/current/atlas-server/hooks/sqoop/somejavafile.jar

  In HDP 2.5.x.y, we plan to have the Sqoop and Storm rpms have additional dependencies on some sqoop-atlas-hook and storm-atlas-hook
  rpms, respectively, that will bring in the necessary jars and create the symlinks.

  If atlas is present on this host, then link the jars from
  {stack_root}/current/{hook_name}/lib/name_version.jar -> {jar_source_dir}/name_version.jar
  @param hook_name: one of sqoop, storm
  @param jar_source_dir: directory of where the symlinks need to be created from.
  """
    import params

    stack_root = Script.get_stack_root()
    atlas_home_dir = os.path.join(stack_root, "current", "atlas-server")

    # if this is an upgrade/downgrade, then we must link in the correct version
    # which may not be "current", so change the home directory location
    upgrade_type = Script.get_upgrade_type(
        default("/commandParams/upgrade_type", ""))
    if upgrade_type is not None:
        version_dir_segment = stack_features.get_stack_feature_version(
            Script.get_config())
        atlas_home_dir = os.path.join(stack_root, version_dir_segment, "atlas")

    # Will only exist if this host contains Atlas Server
    atlas_hook_dir = os.path.join(atlas_home_dir, "hook", hook_name)

    if os.path.exists(atlas_hook_dir):
        Logger.info(
            "Atlas Server is present on this host, will symlink jars inside of %s to %s if not already done."
            % (jar_source_dir, atlas_hook_dir))

        src_files = os.listdir(atlas_hook_dir)
        for file_name in src_files:
            atlas_hook_file_name = os.path.join(atlas_hook_dir, file_name)
            source_lib_file_name = os.path.join(jar_source_dir, file_name)
            if os.path.isfile(atlas_hook_file_name):
                Link(source_lib_file_name, to=atlas_hook_file_name)
    else:
        Logger.info("Atlas hook directory path {0} doesn't exist".format(
            atlas_hook_dir))
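
# Usage sketch (the hook name and libext path below are illustrative, not
# taken from the original service scripts):
#   setup_atlas_jar_symlinks("storm", "/usr/hdp/current/storm-client/libext")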
Example #19
def install_graphite_api():
    import os, params
    Directory([
        params.graphite_conf_dir, params.log_dir, params.pid_dir,
        '/srv/graphite'
    ],
              owner=params.graphite_user,
              group=params.user_group,
              mode=0775,
              cd_access="a",
              create_parents=True)

    if not os.path.exists(params.install_dir_graphite_api):
        Execute('wget ' + params.download_url_graphite_api + ' -O /tmp/' +
                params.filename_graphite_api,
                user=params.graphite_user)
        Execute('tar -xf /tmp/' + params.filename_graphite_api + ' -C  ' +
                Script.get_stack_root())

        Execute('chown -R %s:%s %s' % (params.graphite_user, params.user_group,
                                       params.install_dir_graphite_api))
        Execute('/bin/rm -f /tmp/' + params.filename_graphite_api)

        File(
            params.install_dir_graphite_api +
            '/lib/python3.6/site-packages/graphite_api/finders/graphouse_api.py',
            content=StaticFile("graphouse_api.py"),
            mode=0755)
        File('/usr/lib/systemd/system/graphite-api.service',
             content=params.graphite_api_systemd_content,
             mode=0755)
        File('/etc/sysconfig/memcached',
             content=params.memcached_content,
             mode=0755)
        Execute('systemctl daemon-reload')
        Execute('systemctl enable graphite-api')
        Execute('systemctl enable memcached')
        Execute('systemctl start memcached')
Example #20
def install_graphite_web():
    import os, params
    Directory([params.graphite_conf_dir, params.log_dir, params.pid_dir],
              owner=params.graphite_user,
              group=params.user_group,
              mode=0775,
              cd_access="a",
              create_parents=True)

    if not os.path.exists(params.install_dir_graphite_web):
        Execute('wget ' + params.download_url_graphite_web + ' -O /tmp/' +
                params.filename_graphite_web,
                user=params.graphite_user)
        Execute('tar -xf /tmp/' + params.filename_graphite_web + ' -C  ' +
                Script.get_stack_root())

        Execute(' rm -rf ' + params.install_dir_graphite_web + '/conf')
        Execute('ln -s ' + params.graphite_conf_dir + ' ' +
                params.install_dir_graphite_web + '/conf')

        Execute('chown -R %s:%s %s' % (params.graphite_user, params.user_group,
                                       params.install_dir_graphite_web))
        Execute('/bin/rm -f /tmp/' + params.filename_graphite_web)

        File(params.install_dir_graphite_web + '/webapp/graphite/graphouse.py',
             content=StaticFile("graphouse.py"),
             mode=0755)

        File('/usr/lib/systemd/system/graphite-web.service',
             content=params.graphite_web_systemd_content,
             mode=0755)
        File('/etc/sysconfig/memcached',
             content=params.memcached_content,
             mode=0755)
        Execute('systemctl daemon-reload')
        Execute('systemctl enable graphite-web')
        Execute('systemctl enable memcached')
        Execute('systemctl start memcached')
Example #21
 def status(self, env):
     import params
     env.set_params(params)
     check_process_status(Script.get_stack_root() + '/atlassian/jira/work/jira.pid')
Example #22
    False)
# ranger storm plugin end section

namenode_hosts = default("/clusterHostInfo/namenode_hosts", [])
has_namenode = not len(namenode_hosts) == 0

hdfs_user = config['configurations']['hadoop-env'][
    'hdfs_user'] if has_namenode else None
hdfs_user_keytab = config['configurations']['hadoop-env'][
    'hdfs_user_keytab'] if has_namenode else None
hdfs_principal_name = config['configurations']['hadoop-env'][
    'hdfs_principal_name'] if has_namenode else None
hdfs_site = config['configurations']['hdfs-site'] if has_namenode else None
default_fs = config['configurations']['core-site'][
    'fs.defaultFS'] if has_namenode else None
hadoop_bin_dir = (Script.get_stack_root() + '/hadoop/bin/'
                  if has_namenode else None)
hadoop_conf_dir = '/etc/hadoop' if has_namenode else None
kinit_path_local = get_kinit_path(
    default('/configurations/kerberos-env/executable_search_paths', None))
dfs_type = default("/clusterLevelParams/dfs_type", "")

import functools

# Create partial functions with common arguments for every HdfsResource call;
# to create/delete an HDFS directory/file or copy from local, code calls params.HdfsResource.
HdfsResource = functools.partial(
    HdfsResource,
    user=hdfs_user,
    hdfs_resource_ignore_file=
    "/var/lib/ambari-agent/data/.hdfs_resource_ignore",
    security_enabled=security_enabled,
Example #23
limitations under the License.

"""

import os
from resource_management.libraries.script import Script
from resource_management.libraries.functions.format import format
from resource_management.libraries.functions.default import default
from resource_management.libraries.functions.is_empty import is_empty
from resource_management.libraries.functions.get_bare_principal import get_bare_principal
from resource_management.core.exceptions import Fail
from resource_management.libraries.functions import get_kinit_path

config = Script.get_config()
tmp_dir = Script.get_tmp_dir()
stack_root = Script.get_stack_root()
current_host = config['agentLevelParams']['hostname']
kinit_path_local = get_kinit_path(
    default('/configurations/kerberos-env/executable_search_paths', None))
ugsync_keytab = config['configurations']['ranger-ugsync-site'][
    'ranger.usersync.kerberos.keytab']
ugsync_principal = config['configurations']['ranger-ugsync-site'][
    'ranger.usersync.kerberos.principal'].replace('_HOST',
                                                  current_host.lower())

install_dir_admin = Script.get_stack_root() + "/ranger-admin"

download_url_admin = default("/configurations/ranger-env/download_url_admin",
                             "")
filename_admin = download_url_admin.split('/')[-1]
version_dir_admin = filename_admin.replace('.tar.gz', '').replace('.tgz', '')
Example #24
jdk_location = config['ambariLevelParams']['jdk_location']
dfs_type = default("/clusterLevelParams/dfs_type", "")
jdbc_jar_name = default("/ambariLevelParams/custom_mysql_jdbc_name", None)
hostname = config['agentLevelParams']['hostname']
ambari_cluster_name = config['clusterName']
java_version = expect("/hostLevelParams/java_version", int)
host_sys_prepped = default("/hostLevelParams/host_sys_prepped", False)

beacon_hosts = default("/clusterHostInfo/beacon_server_hosts", None)
if type(beacon_hosts) is list:
    beacon_host_name = beacon_hosts[0]
else:
    beacon_host_name = beacon_hosts

tmp_dir = Script.get_tmp_dir()
stack_root = Script.get_stack_root()

install_dir = stack_root + '/canal'
download_url = config['configurations']['beacon-env']['download_url']
filename = download_url.split('/')[-1]
version_dir = filename.replace('.tar.gz', '').replace('.tgz', '')
beacon_user = config['configurations']['beacon-env']['canal_user']

beacon_home_dir = stack_root + '/beacon'
beacon_root = beacon_home_dir
beacon_webapp_dir = beacon_home_dir + '/webapp'
beacon_home = beacon_home_dir
beacon_cluster_name = format('{ambari_cluster_name}')
credential_store_enabled = False
if 'credentialStoreEnabled' in config:
    credential_store_enabled = config['credentialStoreEnabled']
Example #25
import os

from ambari_commons.constants import AMBARI_SUDO_BINARY
from ambari_commons.constants import LOGFEEDER_CONF_DIR
from resource_management.libraries.script import Script
from resource_management.libraries.script.script import get_config_lock_file
from resource_management.libraries.functions import default
from resource_management.libraries.functions import format_jvm_option
from resource_management.libraries.functions.version import format_stack_version, get_major_version
from string import lower

config = Script.get_config()
tmp_dir = Script.get_tmp_dir()

versioned_stack_root = Script.get_stack_root()

dfs_type = default("/clusterLevelParams/dfs_type", "")

is_parallel_execution_enabled = int(
    default("/agentConfigParams/agent/parallel_execution", 0)) == 1
host_sys_prepped = default("/ambariLevelParams/host_sys_prepped", False)

sudo = AMBARI_SUDO_BINARY

stack_version_unformatted = config['clusterLevelParams']['stack_version']
stack_version_formatted = format_stack_version(stack_version_unformatted)
major_stack_version = get_major_version(stack_version_formatted)

hadoop_home = versioned_stack_root + '/hadoop'
hadoop_libexec_dir = hadoop_home + "/libexec"
Example #26
])

NON_CLIENT_SERVICES = [
    SERVICE.HIVE, SERVICE.STORM, SERVICE.FALCON, SERVICE.HBASE
]

from resource_management.core.resources import Execute

download_url_base = default("/configurations/cluster-env/download_url_base",
                            'http://assets.example.com/')

CONST_ATLAS_VERSION = default("/configurations/atlas-env/plugin_version",
                              '1.0.0')
CONST_DOWNLOAD_URL_BASE = download_url_base + '/atlas/'

stack_root = Script.get_stack_root().replace('/usr/hdp', '/opt')


def install_atlas_hook(hook_name):
    if not hook_name:
        return ''

    filename = 'atlas-' + CONST_ATLAS_VERSION + '-' + hook_name + '-plugin.tar.gz'
    download_url = CONST_DOWNLOAD_URL_BASE + filename
    version_dir = stack_root + '/atlas-' + CONST_ATLAS_VERSION + '-' + hook_name + '-plugin'
    install_dir = stack_root + '/atlas-' + hook_name + '-plugin'
    if not os.path.exists(version_dir):
        Execute('rm -rf %s' % install_dir)
        Execute('wget ' + download_url + ' -O /tmp/' + filename)
        Execute('tar -zxf /tmp/' + filename + ' -C ' + stack_root)
        Execute('ln -s ' + version_dir + ' ' + install_dir)
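
# Usage sketch (the hook name is illustrative):
#   install_atlas_hook('storm')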
Example #27
rest_hbase_configured_flag_file = status_params.rest_hbase_configured_flag_file
rest_hbase_acl_configured_flag_file = status_params.rest_hbase_acl_configured_flag_file
metron_knox_installed_flag_file = status_params.metron_knox_installed_flag_file
global_properties_template = config['configurations']['metron-env']['elasticsearch-properties']

# Elasticsearch hosts and port management
es_cluster_name = config['configurations']['metron-env']['es_cluster_name']
es_hosts = config['configurations']['metron-env']['es_hosts']
es_host_list = es_hosts.split(",")
es_http_port = config['configurations']['metron-env']['es_http_port']
es_url = ",".join([host + ":" + es_http_port for host in es_host_list])
es_http_url = es_host_list[0] + ":" + es_http_port
es_date_format = config['configurations']['metron-env']['es_date_format']

# hadoop params
stack_root = Script.get_stack_root()
# This is the cluster group named 'hadoop'. Its membership is the stack process user ids not individual users.
# The config name 'user_group' is out of our control and a bit misleading, so it is renamed to 'hadoop_group'.
hadoop_group = config['configurations']['cluster-env']['user_group']
hadoop_home_dir = stack_select.get_hadoop_dir("home")
hadoop_bin_dir = stack_select.get_hadoop_dir("bin")
hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
kafka_home = os.path.join(stack_root, "current", "kafka-broker")
kafka_bin_dir = os.path.join(kafka_home, "bin")

# zookeeper
zk_hosts = default("/clusterHostInfo/zookeeper_hosts", [])
has_zk_host = not len(zk_hosts) == 0
zookeeper_quorum = None
if has_zk_host:
    if 'zoo.cfg' in config['configurations'] and 'clientPort' in config['configurations']['zoo.cfg']:
Example #28
def install_jira():
    import os, params
    Directory([params.conf_dir], owner='jira', mode=0775, create_parents=True)
    if not os.path.exists(Script.get_stack_root() + '/atlassian/jira'):
        Execute('wget ' + params.download_url_jira + ' -O /tmp/jira.bin')
        Execute('sudo /tmp/jira.bin')
Example #29
from resource_management.libraries.functions import stack_select
from resource_management.libraries.functions import format_jvm_option
from resource_management.libraries.functions.is_empty import is_empty
from resource_management.libraries.functions.version import format_stack_version
from resource_management.libraries.functions.expect import expect
from resource_management.libraries.functions import StackFeature
from resource_management.libraries.functions.stack_features import check_stack_feature
from resource_management.libraries.functions.stack_features import get_stack_feature_version
from resource_management.libraries.functions.get_architecture import get_architecture
from ambari_commons.constants import AMBARI_SUDO_BINARY
from resource_management.libraries.functions.namenode_ha_utils import get_properties_for_all_nameservices, namenode_federation_enabled

config = Script.get_config()
tmp_dir = Script.get_tmp_dir()

stack_root = Script.get_stack_root()

architecture = get_architecture()

dfs_type = default("/clusterLevelParams/dfs_type", "")

artifact_dir = format("{tmp_dir}/AMBARI-artifacts/")
jdk_name = default("/ambariLevelParams/jdk_name", None)
java_home = config['ambariLevelParams']['java_home']
java_version = expect("/ambariLevelParams/java_version", int)
jdk_location = config['ambariLevelParams']['jdk_location']

hadoop_custom_extensions_enabled = default(
    "/configurations/core-site/hadoop.custom-extensions.enabled", False)

sudo = AMBARI_SUDO_BINARY
Example #30
See the License for the specific language governing permissions and
limitations under the License.

"""

import os
import sys
from resource_management.libraries.functions.default import default
from resource_management.libraries.functions.get_kinit_path import get_kinit_path
from resource_management.libraries.script import Script
from resource_management.libraries.functions.format import format
from resource_management.libraries.functions.expect import expect
from resource_management.core.exceptions import Fail

config = Script.get_config()
stack_root = Script.get_stack_root()
install_dir = stack_root + '/sqoop'
download_url = config['configurations']['sqoop-env']['download_url']
filename = download_url.split('/')[-1]
version_dir = filename.replace('.tar.gz', '').replace('.tgz', '')

# Needed since this is an Atlas Hook service.
cluster_name = config['clusterName']

ambari_server_hostname = config['ambariLevelParams']['ambari_server_host']

stack_name = default("/clusterLevelParams/stack_name", None)

agent_stack_retry_on_unavailability = config['ambariLevelParams'][
    'agent_stack_retry_on_unavailability']
agent_stack_retry_count = expect("/ambariLevelParams/agent_stack_retry_count",