Exemplo n.º 1
0
def get_query_server_config(name='beeswax', server=None, cluster=None):
  """Assemble connection settings for the requested query engine.

  Handles 'impala' (delegated to the impala app), 'hms' (Hive Metastore),
  'sparksql' (HiveServer2 settings with Spark host/port overrides) and the
  default 'beeswax' (HiveServer2).
  """
  LOG.debug("Query cluster %s: %s" % (name, cluster))

  cluster_config = get_cluster_config(cluster)

  if name == 'impala':
    from impala.dbms import get_query_server_config as impala_query_server_config
    query_server = impala_query_server_config(cluster_config=cluster_config)
  elif name == 'hms':
    principal = hive_site.get_hiveserver2_kerberos_principal(HIVE_SERVER_HOST.get())

    query_server = {
        'server_name': 'hms',
        'server_host': cluster_config.get('server_host') if cluster_config else HIVE_METASTORE_HOST.get(),
        'server_port': HIVE_METASTORE_PORT.get(),
        'principal': principal,
        'transport_mode': 'http' if hive_site.hiveserver2_transport_mode() == 'HTTP' else 'socket',
        'auth_username': AUTH_USERNAME.get(),
        'auth_password': AUTH_PASSWORD.get()
    }
  else:
    principal = hive_site.get_hiveserver2_kerberos_principal(HIVE_SERVER_HOST.get())
    http_url_parts = {
        'protocol': 'https' if hiveserver2_use_ssl() else 'http',
        'host': HIVE_SERVER_HOST.get(),
        'port': hive_site.hiveserver2_thrift_http_port(),
        'end_point': hive_site.hiveserver2_thrift_http_path()
    }

    query_server = {
        'server_name': 'beeswax',
        'server_host': cluster_config.get('server_host') if cluster_config else HIVE_SERVER_HOST.get(),
        'server_port': HIVE_SERVER_PORT.get(),
        'principal': principal,
        'http_url': '%(protocol)s://%(host)s:%(port)s/%(end_point)s' % http_url_parts,
        'transport_mode': 'http' if hive_site.hiveserver2_transport_mode() == 'HTTP' else 'socket',
        'auth_username': AUTH_USERNAME.get(),
        'auth_password': AUTH_PASSWORD.get()
    }

  if name == 'sparksql':  # Spark SQL reuses the HiveServer2 settings
    from spark.conf import SQL_SERVER_HOST as SPARK_SERVER_HOST, SQL_SERVER_PORT as SPARK_SERVER_PORT

    query_server.update({
        'server_name': 'sparksql',
        'server_host': SPARK_SERVER_HOST.get(),
        'server_port': SPARK_SERVER_PORT.get()
    })

  # Log a copy that only reveals whether a password was set, never its value.
  debug_query_server = dict(query_server)
  debug_query_server['auth_password_used'] = bool(debug_query_server.pop('auth_password'))
  LOG.debug("Query Server: %s" % debug_query_server)

  return query_server
Exemplo n.º 2
0
Arquivo: dbms.py Projeto: cloudera/hue
def get_query_server_config(name='beeswax', server=None, cluster=None):
  """Build the connection-settings dict for the requested query engine.

  Supports 'impala' (delegated to the impala app), 'hms' (Hive Metastore),
  'sparksql' (HiveServer2 settings with Spark overrides) and the default
  'beeswax' (HiveServer2).
  """
  LOG.debug("Query cluster %s: %s" % (name, cluster))

  cluster_config = get_cluster_config(cluster)

  if name == 'impala':
    from impala.dbms import get_query_server_config as impala_query_server_config
    query_server = impala_query_server_config(cluster_config=cluster_config)
  elif name == 'hms':
    principal = hive_site.get_hiveserver2_kerberos_principal(HIVE_SERVER_HOST.get())
    query_server = {
        'server_name': 'hms',
        'server_host': cluster_config.get('server_host') if cluster_config else HIVE_METASTORE_HOST.get(),
        'server_port': HIVE_METASTORE_PORT.get(),
        'principal': principal,
        'transport_mode': 'http' if hive_site.hiveserver2_transport_mode() == 'HTTP' else 'socket',
        'auth_username': AUTH_USERNAME.get(),
        'auth_password': AUTH_PASSWORD.get()
    }
  else:
    principal = hive_site.get_hiveserver2_kerberos_principal(HIVE_SERVER_HOST.get())
    url_parts = {
        'protocol': 'https' if hiveserver2_use_ssl() else 'http',
        'host': HIVE_SERVER_HOST.get(),
        'port': hive_site.hiveserver2_thrift_http_port(),
        'end_point': hive_site.hiveserver2_thrift_http_path()
    }
    query_server = {
        'server_name': 'beeswax',
        'server_host': cluster_config.get('server_host') if cluster_config else HIVE_SERVER_HOST.get(),
        'server_port': HIVE_SERVER_PORT.get(),
        'principal': principal,
        'http_url': '%(protocol)s://%(host)s:%(port)s/%(end_point)s' % url_parts,
        'transport_mode': 'http' if hive_site.hiveserver2_transport_mode() == 'HTTP' else 'socket',
        'auth_username': AUTH_USERNAME.get(),
        'auth_password': AUTH_PASSWORD.get()
    }

  if name == 'sparksql':  # Spark SQL reuses the HiveServer2 settings
    from spark.conf import SQL_SERVER_HOST as SPARK_SERVER_HOST, SQL_SERVER_PORT as SPARK_SERVER_PORT

    query_server.update({
        'server_name': 'sparksql',
        'server_host': SPARK_SERVER_HOST.get(),
        'server_port': SPARK_SERVER_PORT.get()
    })

  # Never log the actual password, only whether one was configured.
  debug_query_server = dict(query_server)
  debug_query_server['auth_password_used'] = bool(debug_query_server.pop('auth_password'))
  LOG.debug("Query Server: %s" % debug_query_server)

  return query_server
Exemplo n.º 3
0
Arquivo: dbms.py Projeto: hysteam/hue
def get_query_server_config(name='beeswax', server=None):
  """Return connection settings for either Impala or Beeswax (HiveServer2)."""
  if name == 'impala':
    from impala.conf import SERVER_HOST as IMPALA_SERVER_HOST, SERVER_PORT as IMPALA_SERVER_PORT, \
        IMPALA_PRINCIPAL, IMPERSONATION_ENABLED, QUERYCACHE_ROWS, QUERY_TIMEOUT_S

    query_server = {
        'server_name': 'impala',
        'server_host': IMPALA_SERVER_HOST.get(),
        'server_port': IMPALA_SERVER_PORT.get(),
        'principal': IMPALA_PRINCIPAL.get(),
        'impersonation_enabled': IMPERSONATION_ENABLED.get(),
        'querycache_rows': QUERYCACHE_ROWS.get(),
        'QUERY_TIMEOUT_S': QUERY_TIMEOUT_S.get(),
    }
  else:
    principal = hive_site.get_hiveserver2_kerberos_principal(HIVE_SERVER_HOST.get())
    use_http = hive_site.hiveserver2_transport_mode() == 'HTTP'
    http_url = '%(protocol)s://%(host)s:%(port)s/%(end_point)s' % {
        'protocol': 'https' if hiveserver2_use_ssl() else 'http',
        'host': HIVE_SERVER_HOST.get(),
        'port': hive_site.hiveserver2_thrift_http_port(),
        'end_point': hive_site.hiveserver2_thrift_http_path()
    }

    query_server = {
        'server_name': 'beeswax',  # historical name for HiveServer2
        'server_host': HIVE_SERVER_HOST.get(),
        'server_port': HIVE_SERVER_PORT.get(),
        'principal': principal,
        'http_url': http_url,
        'transport_mode': 'http' if use_http else 'socket',
    }

  LOG.debug("Query Server: %s" % query_server)

  return query_server
Exemplo n.º 4
0
Arquivo: dbms.py Projeto: jounex/hue
def get_query_server_config(name='beeswax', server=None):
  """Build the settings dict used to connect to Impala or HiveServer2."""
  if name == 'impala':
    from impala.conf import SERVER_HOST as IMPALA_SERVER_HOST, SERVER_PORT as IMPALA_SERVER_PORT, \
        IMPALA_PRINCIPAL, IMPERSONATION_ENABLED, QUERYCACHE_ROWS, QUERY_TIMEOUT_S

    query_server = {'server_name': 'impala'}
    query_server['server_host'] = IMPALA_SERVER_HOST.get()
    query_server['server_port'] = IMPALA_SERVER_PORT.get()
    query_server['principal'] = IMPALA_PRINCIPAL.get()
    query_server['impersonation_enabled'] = IMPERSONATION_ENABLED.get()
    query_server['querycache_rows'] = QUERYCACHE_ROWS.get()
    query_server['QUERY_TIMEOUT_S'] = QUERY_TIMEOUT_S.get()
  else:
    principal = hive_site.get_hiveserver2_kerberos_principal(HIVE_SERVER_HOST.get())
    url_parts = {
        'protocol': 'https' if hiveserver2_use_ssl() else 'http',
        'host': HIVE_SERVER_HOST.get(),
        'port': hive_site.hiveserver2_thrift_http_port(),
        'end_point': hive_site.hiveserver2_thrift_http_path()
    }

    query_server = {
        'server_name': 'beeswax',  # historical name for HiveServer2
        'server_host': HIVE_SERVER_HOST.get(),
        'server_port': HIVE_SERVER_PORT.get(),
        'principal': principal,
        'http_url': '%(protocol)s://%(host)s:%(port)s/%(end_point)s' % url_parts,
        'transport_mode': 'http' if hive_site.hiveserver2_transport_mode() == 'HTTP' else 'socket',
    }

  LOG.debug("Query Server: %s" % query_server)

  return query_server
Exemplo n.º 5
0
def get_query_server_config(name='beeswax'):
  """Return connection settings for Impala or Beeswax/HiveServer2."""
  if name == 'impala':
    from impala.conf import SERVER_HOST, SERVER_PORT, IMPALA_PRINCIPAL, SERVER_INTERFACE as IMPALA_SERVER_INTERFACE

    # Backward compatibility until Hue 3.0: when HiveServer2 is requested but
    # the configured port is still the Beeswax default, use the HS2 default.
    port = SERVER_PORT.get()
    if IMPALA_SERVER_INTERFACE.get() == 'hiveserver2' and port == 21000:
      port = 21050

    query_server = {
        'server_name': 'impala',
        'server_host': SERVER_HOST.get(),
        'server_port': port,
        'server_interface': IMPALA_SERVER_INTERFACE.get(),
        'principal': IMPALA_PRINCIPAL.get(),
    }
  else:
    if SERVER_INTERFACE.get() == 'hiveserver2':
      kerberos_principal = hive_site.get_hiveserver2_kerberos_principal()
    else:
      kerberos_principal = KERBEROS.HUE_PRINCIPAL.get()  # Beeswaxd runs as 'hue'

    query_server = {
        'server_name': 'beeswax',
        'server_host': BEESWAX_SERVER_HOST.get(),
        'server_port': BEESWAX_SERVER_PORT.get(),
        'server_interface': SERVER_INTERFACE.get(),
        'principal': kerberos_principal
    }
    LOG.debug("Query Server:\n\tName: %(server_name)s\n\tHost: %(server_host)s\n\tPort: %(server_port)s\n\tInterface: %(server_interface)s\n\tKerberos Principal: %(principal)s" % query_server)

  return query_server
Exemplo n.º 6
0
  def get_properties(self, hive_properties=None):
    """Build the Oozie credential definitions for Hive, HiveServer2 and HBase.

    Args:
      hive_properties: optional pre-fetched hive-site.xml properties dict;
        when None they are loaded from the local hive-site.xml.

    Returns:
      Dict mapping credential name to its {'xml_name', 'properties'} spec.
    """
    credentials = {}
    from beeswax import hive_site, conf

    if hive_properties is None:
      hive_properties = hive_site.get_metastore()
      if hive_properties:
        hive_properties['hive2.server.principal'] = hive_site.get_hiveserver2_kerberos_principal(conf.HIVE_SERVER_HOST.get())

    if not hive_properties:
      hive_properties = {}
      # Logger.warn is a deprecated alias of Logger.warning.
      LOG.warning('Could not get all the Oozie credentials: hive-site.xml required on the Hue host.')

    credentials[self.hive_name] = {
      'xml_name': self.hive_name,
      'properties': [
         ('hcat.metastore.uri', hive_properties.get('thrift_uri')),
         ('hcat.metastore.principal', hive_properties.get('kerberos_principal')),
      ]
    }

    credentials[self.hiveserver2_name] = {
      'xml_name': self.hiveserver2_name,
      'properties': [
         ('hive2.jdbc.url', hive_site.hiveserver2_jdbc_url()),
         ('hive2.server.principal', hive_properties.get('hive2.server.principal')),
      ]
    }

    credentials[self.hbase_name] = {
      'xml_name': self.hbase_name,
      'properties': []
    }

    return credentials
Exemplo n.º 7
0
def get_query_server_config(name="beeswax"):
    """Return connection settings for Impala or Beeswax/HiveServer2."""
    if name == "impala":
        from impala.conf import SERVER_HOST, SERVER_PORT, IMPALA_PRINCIPAL, SERVER_INTERFACE as IMPALA_SERVER_INTERFACE

        # Backward compatibility until Hue 3.0: when HiveServer2 is requested
        # but the port is still the Beeswax default, use the HS2 default port.
        port = SERVER_PORT.get()
        if IMPALA_SERVER_INTERFACE.get() == "hiveserver2" and port == 21000:
            port = 21050

        query_server = {
            "server_name": "impala",
            "server_host": SERVER_HOST.get(),
            "server_port": port,
            "server_interface": IMPALA_SERVER_INTERFACE.get(),
            "principal": IMPALA_PRINCIPAL.get(),
        }
    else:
        if SERVER_INTERFACE.get() == "hiveserver2":
            kerberos_principal = hive_site.get_hiveserver2_kerberos_principal()
        else:
            kerberos_principal = KERBEROS.HUE_PRINCIPAL.get()  # Beeswaxd runs as 'hue'

        query_server = {
            "server_name": "beeswax",  # Aka HS2 too
            "server_host": BEESWAX_SERVER_HOST.get(),
            "server_port": BEESWAX_SERVER_PORT.get(),
            "server_interface": SERVER_INTERFACE.get(),
            "principal": kerberos_principal,
        }
        LOG.debug("Query Server: %s" % query_server)

    return query_server
Exemplo n.º 8
0
def get_query_server_config(name='beeswax', server=None):
  """Return connection settings for Impala or Beeswax (HiveServer2)."""
  if name == 'impala':
    from impala.conf import SERVER_HOST as IMPALA_SERVER_HOST, SERVER_PORT as IMPALA_SERVER_PORT, \
        IMPALA_PRINCIPAL, IMPERSONATION_ENABLED, QUERYCACHE_ROWS, QUERY_TIMEOUT_S

    query_server = {'server_name': 'impala'}
    query_server['server_host'] = IMPALA_SERVER_HOST.get()
    query_server['server_port'] = IMPALA_SERVER_PORT.get()
    query_server['principal'] = IMPALA_PRINCIPAL.get()
    query_server['impersonation_enabled'] = IMPERSONATION_ENABLED.get()
    query_server['querycache_rows'] = QUERYCACHE_ROWS.get()
    query_server['QUERY_TIMEOUT_S'] = QUERY_TIMEOUT_S.get()
  else:
    principal = hive_site.get_hiveserver2_kerberos_principal(HIVE_SERVER_HOST.get())

    query_server = {
        'server_name': 'beeswax',  # historical name for HiveServer2
        'server_host': HIVE_SERVER_HOST.get(),
        'server_port': HIVE_SERVER_PORT.get(),
        'principal': principal
    }

  LOG.debug("Query Server: %s" % query_server)

  return query_server
Exemplo n.º 9
0
def get_query_server_config(name='beeswax', server=None):
    """Return connection settings for Impala, Spark SQL or HiveServer2."""
    if name == 'impala':
        from impala.conf import SERVER_HOST as IMPALA_SERVER_HOST, SERVER_PORT as IMPALA_SERVER_PORT, \
            IMPALA_PRINCIPAL, IMPERSONATION_ENABLED, QUERYCACHE_ROWS, QUERY_TIMEOUT_S, AUTH_USERNAME as IMPALA_AUTH_USERNAME, AUTH_PASSWORD as IMPALA_AUTH_PASSWORD

        query_server = {
            'server_name': 'impala',
            'server_host': IMPALA_SERVER_HOST.get(),
            'server_port': IMPALA_SERVER_PORT.get(),
            'principal': IMPALA_PRINCIPAL.get(),
            'impersonation_enabled': IMPERSONATION_ENABLED.get(),
            'querycache_rows': QUERYCACHE_ROWS.get(),
            'QUERY_TIMEOUT_S': QUERY_TIMEOUT_S.get(),
            'auth_username': IMPALA_AUTH_USERNAME.get(),
            'auth_password': IMPALA_AUTH_PASSWORD.get()
        }
    else:
        principal = hive_site.get_hiveserver2_kerberos_principal(HIVE_SERVER_HOST.get())
        http_url = '%(protocol)s://%(host)s:%(port)s/%(end_point)s' % {
            'protocol': 'https' if hiveserver2_use_ssl() else 'http',
            'host': HIVE_SERVER_HOST.get(),
            'port': hive_site.hiveserver2_thrift_http_port(),
            'end_point': hive_site.hiveserver2_thrift_http_path()
        }

        query_server = {
            'server_name': 'beeswax',  # historical name for HiveServer2
            'server_host': HIVE_SERVER_HOST.get(),
            'server_port': HIVE_SERVER_PORT.get(),
            'principal': principal,
            'http_url': http_url,
            'transport_mode': 'http' if hive_site.hiveserver2_transport_mode() == 'HTTP' else 'socket',
            'auth_username': AUTH_USERNAME.get(),
            'auth_password': AUTH_PASSWORD.get()
        }

    if name == 'sparksql':  # Spark SQL reuses the HiveServer2 settings
        from spark.conf import SQL_SERVER_HOST as SPARK_SERVER_HOST, SQL_SERVER_PORT as SPARK_SERVER_PORT

        query_server.update({
            'server_name': 'sparksql',
            'server_host': SPARK_SERVER_HOST.get(),
            'server_port': SPARK_SERVER_PORT.get()
        })

    # Log a copy that only reveals whether a password was set, not its value.
    debug_query_server = dict(query_server)
    debug_query_server['auth_password_used'] = bool(debug_query_server.pop('auth_password'))
    LOG.debug("Query Server: %s" % debug_query_server)

    return query_server
Exemplo n.º 10
0
def get_query_server_config(name='beeswax', server=None):
    """Return connection settings for Impala or Beeswax (HiveServer2)."""
    if name == 'impala':
        from impala.conf import SERVER_HOST as IMPALA_SERVER_HOST, SERVER_PORT as IMPALA_SERVER_PORT, \
            IMPALA_PRINCIPAL, IMPERSONATION_ENABLED, QUERYCACHE_ROWS, QUERY_TIMEOUT_S

        query_server = {'server_name': 'impala'}
        query_server['server_host'] = IMPALA_SERVER_HOST.get()
        query_server['server_port'] = IMPALA_SERVER_PORT.get()
        query_server['principal'] = IMPALA_PRINCIPAL.get()
        query_server['impersonation_enabled'] = IMPERSONATION_ENABLED.get()
        query_server['querycache_rows'] = QUERYCACHE_ROWS.get()
        query_server['QUERY_TIMEOUT_S'] = QUERY_TIMEOUT_S.get()
    else:
        principal = hive_site.get_hiveserver2_kerberos_principal(HIVE_SERVER_HOST.get())

        query_server = {
            'server_name': 'beeswax',  # historical name for HiveServer2
            'server_host': HIVE_SERVER_HOST.get(),
            'server_port': HIVE_SERVER_PORT.get(),
            'principal': principal
        }

    LOG.debug("Query Server: %s" % query_server)

    return query_server
Exemplo n.º 11
0
Arquivo: dbms.py Projeto: RoxC/hue
def get_query_server_config(name="beeswax", server=None):
    """Return connection settings for Impala, Spark SQL or HiveServer2."""
    if name == "impala":
        from impala.conf import (
            SERVER_HOST as IMPALA_SERVER_HOST,
            SERVER_PORT as IMPALA_SERVER_PORT,
            IMPALA_PRINCIPAL,
            IMPERSONATION_ENABLED,
            QUERYCACHE_ROWS,
            QUERY_TIMEOUT_S,
            AUTH_USERNAME as IMPALA_AUTH_USERNAME,
            AUTH_PASSWORD as IMPALA_AUTH_PASSWORD,
        )

        query_server = {
            "server_name": "impala",
            "server_host": IMPALA_SERVER_HOST.get(),
            "server_port": IMPALA_SERVER_PORT.get(),
            "principal": IMPALA_PRINCIPAL.get(),
            "impersonation_enabled": IMPERSONATION_ENABLED.get(),
            "querycache_rows": QUERYCACHE_ROWS.get(),
            "QUERY_TIMEOUT_S": QUERY_TIMEOUT_S.get(),
            "auth_username": IMPALA_AUTH_USERNAME.get(),
            "auth_password": IMPALA_AUTH_PASSWORD.get(),
        }
    else:
        principal = hive_site.get_hiveserver2_kerberos_principal(HIVE_SERVER_HOST.get())
        http_url = "%(protocol)s://%(host)s:%(port)s/%(end_point)s" % {
            "protocol": "https" if hiveserver2_use_ssl() else "http",
            "host": HIVE_SERVER_HOST.get(),
            "port": hive_site.hiveserver2_thrift_http_port(),
            "end_point": hive_site.hiveserver2_thrift_http_path(),
        }

        query_server = {
            "server_name": "beeswax",  # historical name for HiveServer2
            "server_host": HIVE_SERVER_HOST.get(),
            "server_port": HIVE_SERVER_PORT.get(),
            "principal": principal,
            "http_url": http_url,
            "transport_mode": "http" if hive_site.hiveserver2_transport_mode() == "HTTP" else "socket",
            "auth_username": AUTH_USERNAME.get(),
            "auth_password": AUTH_PASSWORD.get(),
        }

    if name == "sparksql":  # Spark SQL reuses the HiveServer2 settings
        from spark.conf import SQL_SERVER_HOST as SPARK_SERVER_HOST, SQL_SERVER_PORT as SPARK_SERVER_PORT

        query_server["server_name"] = "sparksql"
        query_server["server_host"] = SPARK_SERVER_HOST.get()
        query_server["server_port"] = SPARK_SERVER_PORT.get()

    # Log a copy that only reveals whether a password was set, not its value.
    debug_query_server = dict(query_server)
    debug_query_server["auth_password_used"] = bool(debug_query_server.pop("auth_password"))
    LOG.debug("Query Server: %s" % debug_query_server)

    return query_server
Exemplo n.º 12
0
def get_query_server_config(name='beeswax', server=None):
  """Return connection settings for Impala, Spark SQL or HiveServer2."""
  if name == 'impala':
    from impala.conf import SERVER_HOST as IMPALA_SERVER_HOST, SERVER_PORT as IMPALA_SERVER_PORT, \
        IMPALA_PRINCIPAL, IMPERSONATION_ENABLED, QUERYCACHE_ROWS, QUERY_TIMEOUT_S, AUTH_USERNAME as IMPALA_AUTH_USERNAME, AUTH_PASSWORD as IMPALA_AUTH_PASSWORD, \
        SESSION_TIMEOUT_S

    query_server = {
        'server_name': 'impala',
        'server_host': IMPALA_SERVER_HOST.get(),
        'server_port': IMPALA_SERVER_PORT.get(),
        'principal': IMPALA_PRINCIPAL.get(),
        'impersonation_enabled': IMPERSONATION_ENABLED.get(),
        'querycache_rows': QUERYCACHE_ROWS.get(),
        'QUERY_TIMEOUT_S': QUERY_TIMEOUT_S.get(),
        'SESSION_TIMEOUT_S': SESSION_TIMEOUT_S.get(),
        'auth_username': IMPALA_AUTH_USERNAME.get(),
        'auth_password': IMPALA_AUTH_PASSWORD.get()
    }
  else:
    principal = hive_site.get_hiveserver2_kerberos_principal(HIVE_SERVER_HOST.get())
    url_parts = {
        'protocol': 'https' if hiveserver2_use_ssl() else 'http',
        'host': HIVE_SERVER_HOST.get(),
        'port': hive_site.hiveserver2_thrift_http_port(),
        'end_point': hive_site.hiveserver2_thrift_http_path()
    }

    query_server = {
        'server_name': 'beeswax',  # historical name for HiveServer2
        'server_host': HIVE_SERVER_HOST.get(),
        'server_port': HIVE_SERVER_PORT.get(),
        'principal': principal,
        'http_url': '%(protocol)s://%(host)s:%(port)s/%(end_point)s' % url_parts,
        'transport_mode': 'http' if hive_site.hiveserver2_transport_mode() == 'HTTP' else 'socket',
        'auth_username': AUTH_USERNAME.get(),
        'auth_password': AUTH_PASSWORD.get()
    }

  if name == 'sparksql':  # Spark SQL reuses the HiveServer2 settings
    from spark.conf import SQL_SERVER_HOST as SPARK_SERVER_HOST, SQL_SERVER_PORT as SPARK_SERVER_PORT

    query_server.update({
        'server_name': 'sparksql',
        'server_host': SPARK_SERVER_HOST.get(),
        'server_port': SPARK_SERVER_PORT.get()
    })

  # Never log the actual password, only whether one was configured.
  debug_query_server = dict(query_server)
  debug_query_server['auth_password_used'] = bool(debug_query_server.pop('auth_password'))
  LOG.debug("Query Server: %s" % debug_query_server)

  return query_server
Exemplo n.º 13
0
    def get_properties(self, hive_properties=None):
        """Build the Oozie credential definitions for Hive, HiveServer2 and HBase.

        Args:
          hive_properties: optional pre-fetched hive-site.xml properties dict;
            when None they are read from the local hive-site.xml.

        Returns:
          Dict mapping credential name to its {'xml_name', 'properties'} spec.
          The Hive/HiveServer2 entries are omitted entirely when the beeswax
          app is blacklisted (no usable HIVE_SERVER_HOST configuration).
        """
        credentials = {}
        from beeswax import hive_site, conf

        if not hasattr(conf.HIVE_SERVER_HOST, 'get') or not conf.HIVE_SERVER_HOST.get():
            # Logger.warn is a deprecated alias of Logger.warning.
            LOG.warning('Could not get all the Oozie credentials: beeswax app is blacklisted.')
        else:
            if hive_properties is None:
                hive_properties = hive_site.get_metastore()
                if hive_properties:
                    hive_properties['hive2.server.principal'] = hive_site.get_hiveserver2_kerberos_principal(conf.HIVE_SERVER_HOST.get())

            if not hive_properties:
                hive_properties = {}
                LOG.warning('Could not get all the Oozie credentials: hive-site.xml required on the Hue host.')

            credentials[self.hive_name] = {
                'xml_name': self.hive_name,
                'properties': [
                    ('hcat.metastore.uri', hive_properties.get('thrift_uri')),
                    ('hcat.metastore.principal', hive_properties.get('kerberos_principal')),
                ]
            }

            credentials[self.hiveserver2_name] = {
                'xml_name': self.hiveserver2_name,
                'properties': [
                    ('hive2.jdbc.url', hive_site.hiveserver2_jdbc_url()),
                    ('hive2.server.principal', hive_properties.get('hive2.server.principal')),
                ]
            }

        credentials[self.hbase_name] = {
            'xml_name': self.hbase_name,
            'properties': []
        }

        return credentials
Exemplo n.º 14
0
Arquivo: dbms.py Projeto: jackesh/hue
def get_query_server_config(name='beeswax', server=None):
  """Return connection settings for Impala, an RDBMS entry or HiveServer2.

  Args:
    name: engine name ('impala', 'rdbms', or anything else for beeswax).
    server: for 'rdbms', the configured server alias to select; when absent
      or unknown, the first configured RDBMS is used.

  Returns:
    Dict of connection settings; empty dict when 'rdbms' is requested but
    nothing is configured.
  """
  if name == 'impala':
    from impala.conf import SERVER_HOST as IMPALA_SERVER_HOST, SERVER_PORT as IMPALA_SERVER_PORT, \
        IMPALA_PRINCIPAL, IMPERSONATION_ENABLED

    query_server = {
        'server_name': 'impala',
        'server_host': IMPALA_SERVER_HOST.get(),
        'server_port': IMPALA_SERVER_PORT.get(),
        'principal': IMPALA_PRINCIPAL.get(),
        'impersonation_enabled': IMPERSONATION_ENABLED.get()
    }
  elif name == 'rdbms':
    from rdbms.conf import RDBMS

    if not server or server not in RDBMS:
      # Fall back to the first configured RDBMS, if any. list() is required
      # on Python 3, where dict.keys() returns a non-indexable view.
      keys = list(RDBMS.keys())
      name = keys and keys[0] or None
    else:
      name = server

    if name:
      # NOTE: the original had a duplicate 'password' key here; removed.
      query_server = {
        'server_name': RDBMS[name].ENGINE.get().split('.')[-1],
        'server_host': RDBMS[name].HOST.get(),
        'server_port': RDBMS[name].PORT.get(),
        'username': RDBMS[name].USER.get(),
        'password': RDBMS[name].PASSWORD.get(),
        'alias': name
      }
    else:
      query_server = {}

  else:
    kerberos_principal = hive_site.get_hiveserver2_kerberos_principal(HIVE_SERVER_HOST.get())

    query_server = {
        'server_name': 'beeswax', # Aka HiveServer2 now
        'server_host': HIVE_SERVER_HOST.get(),
        'server_port': HIVE_SERVER_PORT.get(),
        'principal': kerberos_principal
    }

  LOG.debug("Query Server: %s" % query_server)

  return query_server
Exemplo n.º 15
0
def get_query_server_config(name='beeswax', server=None):
  """Return HiveServer2 connection settings (the only engine handled here)."""
  principal = hive_site.get_hiveserver2_kerberos_principal(HIVE_SERVER_HOST.get())
  http_url = '%(protocol)s://%(host)s:%(port)s/%(end_point)s' % {
      'protocol': 'https' if SSL.ENABLED.get() else 'http',
      'host': HIVE_SERVER_HOST.get(),
      'port': hive_site.hiveserver2_thrift_http_port(),
      'end_point': hive_site.hiveserver2_thrift_http_path()
  }

  query_server = {
    'server_name': 'beeswax',  # historical name for HiveServer2
    'server_host': HIVE_SERVER_HOST.get(),
    'server_port': HIVE_SERVER_PORT.get(),
    'principal': principal,
    'http_url': http_url,
    'transport_mode': hive_site.hiveserver2_transport_mode(),
  }

  LOG.debug("Query Server: %s" % query_server)

  return query_server
Exemplo n.º 16
0
def get_query_server_config(name="beeswax", server=None):
    """Return connection settings for Impala or Beeswax (HiveServer2)."""
    if name == "impala":
        from impala.conf import (
            SERVER_HOST as IMPALA_SERVER_HOST,
            SERVER_PORT as IMPALA_SERVER_PORT,
            IMPALA_PRINCIPAL,
            IMPERSONATION_ENABLED,
            QUERYCACHE_ROWS,
            QUERY_TIMEOUT_S,
        )

        query_server = {"server_name": "impala"}
        query_server["server_host"] = IMPALA_SERVER_HOST.get()
        query_server["server_port"] = IMPALA_SERVER_PORT.get()
        query_server["principal"] = IMPALA_PRINCIPAL.get()
        query_server["impersonation_enabled"] = IMPERSONATION_ENABLED.get()
        query_server["querycache_rows"] = QUERYCACHE_ROWS.get()
        query_server["QUERY_TIMEOUT_S"] = QUERY_TIMEOUT_S.get()
    else:
        principal = hive_site.get_hiveserver2_kerberos_principal(HIVE_SERVER_HOST.get())
        http_url = "%(protocol)s://%(host)s:%(port)s/%(end_point)s" % {
            "protocol": "https" if hiveserver2_use_ssl() else "http",
            "host": HIVE_SERVER_HOST.get(),
            "port": hive_site.hiveserver2_thrift_http_port(),
            "end_point": hive_site.hiveserver2_thrift_http_path(),
        }

        query_server = {
            "server_name": "beeswax",  # historical name for HiveServer2
            "server_host": HIVE_SERVER_HOST.get(),
            "server_port": HIVE_SERVER_PORT.get(),
            "principal": principal,
            "http_url": http_url,
            "transport_mode": "http" if hive_site.hiveserver2_transport_mode() == "HTTP" else "socket",
        }

    LOG.debug("Query Server: %s" % query_server)

    return query_server
Exemplo n.º 17
0
def get_query_server_config(name='beeswax', server=None):
    """Return HiveServer2 connection settings (the only engine handled here)."""
    principal = hive_site.get_hiveserver2_kerberos_principal(HIVE_SERVER_HOST.get())
    http_url = '%(protocol)s://%(host)s:%(port)s/%(end_point)s' % {
        'protocol': 'https' if SSL.ENABLED.get() else 'http',
        'host': HIVE_SERVER_HOST.get(),
        'port': hive_site.hiveserver2_thrift_http_port(),
        'end_point': hive_site.hiveserver2_thrift_http_path()
    }

    query_server = {
        'server_name': 'beeswax',  # historical name for HiveServer2
        'server_host': HIVE_SERVER_HOST.get(),
        'server_port': HIVE_SERVER_PORT.get(),
        'principal': principal,
        'http_url': http_url,
        'transport_mode': hive_site.hiveserver2_transport_mode(),
    }

    LOG.debug("Query Server: %s" % query_server)

    return query_server
Exemplo n.º 18
0
def get_query_server_config(name='beeswax'):
  """Return connection settings for Impala or Beeswax (HiveServer2)."""
  if name == 'impala':
    from impala.conf import SERVER_HOST as IMPALA_SERVER_HOST, SERVER_PORT as IMPALA_SERVER_PORT, IMPALA_PRINCIPAL

    query_server = {'server_name': 'impala'}
    query_server['server_host'] = IMPALA_SERVER_HOST.get()
    query_server['server_port'] = IMPALA_SERVER_PORT.get()
    query_server['principal'] = IMPALA_PRINCIPAL.get()
  else:
    principal = hive_site.get_hiveserver2_kerberos_principal(HIVE_SERVER_HOST.get())

    query_server = {
        'server_name': 'beeswax',  # historical name for HiveServer2
        'server_host': HIVE_SERVER_HOST.get(),
        'server_port': HIVE_SERVER_PORT.get(),
        'principal': principal
    }
    LOG.debug("Query Server: %s" % query_server)

  return query_server
Exemplo n.º 19
0
def get_query_server_config(name='beeswax'):
    """Build the settings dict used to connect to Impala or HiveServer2."""
    if name == 'impala':
        from impala.conf import SERVER_HOST as IMPALA_SERVER_HOST, SERVER_PORT as IMPALA_SERVER_PORT, IMPALA_PRINCIPAL

        query_server = {'server_name': 'impala'}
        query_server['server_host'] = IMPALA_SERVER_HOST.get()
        query_server['server_port'] = IMPALA_SERVER_PORT.get()
        query_server['principal'] = IMPALA_PRINCIPAL.get()
    else:
        principal = hive_site.get_hiveserver2_kerberos_principal(HIVE_SERVER_HOST.get())

        query_server = {
            'server_name': 'beeswax',  # historical name for HiveServer2
            'server_host': HIVE_SERVER_HOST.get(),
            'server_port': HIVE_SERVER_PORT.get(),
            'principal': principal
        }
        LOG.debug("Query Server: %s" % query_server)

    return query_server
Exemplo n.º 20
0
Arquivo: dbms.py Projeto: fnerdwq/hue
def get_query_server_config(name="beeswax", server=None):
    """Return connection settings for Impala, Spark SQL or HiveServer2."""
    if name == "impala":
        from impala.dbms import get_query_server_config as impala_query_server_config

        query_server = impala_query_server_config()
    else:
        principal = hive_site.get_hiveserver2_kerberos_principal(HIVE_SERVER_HOST.get())
        http_url = "%(protocol)s://%(host)s:%(port)s/%(end_point)s" % {
            "protocol": "https" if hiveserver2_use_ssl() else "http",
            "host": HIVE_SERVER_HOST.get(),
            "port": hive_site.hiveserver2_thrift_http_port(),
            "end_point": hive_site.hiveserver2_thrift_http_path(),
        }

        query_server = {
            "server_name": "beeswax",  # historical name for HiveServer2
            "server_host": HIVE_SERVER_HOST.get(),
            "server_port": HIVE_SERVER_PORT.get(),
            "principal": principal,
            "http_url": http_url,
            "transport_mode": "http" if hive_site.hiveserver2_transport_mode() == "HTTP" else "socket",
            "auth_username": AUTH_USERNAME.get(),
            "auth_password": AUTH_PASSWORD.get(),
        }

    if name == "sparksql":  # Spark SQL reuses the HiveServer2 settings
        from spark.conf import SQL_SERVER_HOST as SPARK_SERVER_HOST, SQL_SERVER_PORT as SPARK_SERVER_PORT

        query_server["server_name"] = "sparksql"
        query_server["server_host"] = SPARK_SERVER_HOST.get()
        query_server["server_port"] = SPARK_SERVER_PORT.get()

    # Log a copy that only reveals whether a password was set, not its value.
    debug_query_server = dict(query_server)
    debug_query_server["auth_password_used"] = bool(debug_query_server.pop("auth_password"))
    LOG.debug("Query Server: %s" % debug_query_server)

    return query_server
Exemplo n.º 21
0
def get_query_server_config(name='beeswax', connector=None):
  """Build the settings dict used to open a session against a query server.

  Args:
    name: Dialect/server to target: 'beeswax' (HiveServer2), 'llap', 'hms',
      'impala', 'sparksql' or 'hplsql'.
    connector: Optional connector definition; when connectors are enabled the
      configuration is derived from it instead of the ini/config values.

  Returns:
    A dict with at least 'server_name', 'server_host', 'server_port' and
    'dialect'; HiveServer2-style entries also carry Kerberos, HTTP transport,
    SASL and session-pool settings.
  """
  if connector and has_connectors(): # TODO: Give empty connector when no connector in use
    LOG.debug("Query via connector %s" % name)
    query_server = get_query_server_config_via_connector(connector)
  else:
    LOG.debug("Query via ini %s" % name)
    if name == "llap":
      # Resolve (and cache) the LLAP endpoint, optionally via ZooKeeper discovery.
      activeEndpoint = cache.get('llap')
      if activeEndpoint is None:
        if HIVE_DISCOVERY_LLAP.get():
          LOG.debug("Checking zookeeper for discovering Hive LLAP server endpoint")
          zk = KazooClient(hosts=libzookeeper_conf.ENSEMBLE.get(), read_only=True)
          zk.start()
          if HIVE_DISCOVERY_LLAP_HA.get():
            znode = "{0}/instances".format(HIVE_DISCOVERY_LLAP_ZNODE.get())
            LOG.debug("Setting up Hive LLAP HA with the following node {0}".format(znode))
            if zk.exists(znode):
              hiveservers = zk.get_children(znode)
              if not hiveservers:
                raise PopupException(_('There is no running Hive LLAP server available'))
              LOG.info("Available Hive LLAP servers: {0}".format(hiveservers))
              for server in hiveservers:
                llap_servers = json.loads(zk.get("{0}/{1}".format(znode, server))[0])["internal"][0]
                if llap_servers["api"] == "activeEndpoint":
                  LOG.info("Selecting Hive LLAP server: {0}".format(llap_servers))
                  cache.set(
                    "llap",
                    json.dumps({
                        "host": llap_servers["addresses"][0]["host"],
                        "port": llap_servers["addresses"][0]["port"]
                      }),
                      CACHE_TIMEOUT.get()
                  )
            else:
              LOG.error("Hive LLAP endpoint not found, reverting to config values")
              cache.set("llap", json.dumps({"host": HIVE_SERVER_HOST.get(), "port": HIVE_HTTP_THRIFT_PORT.get()}), CACHE_TIMEOUT.get())
          else:
            znode = "{0}".format(HIVE_DISCOVERY_LLAP_ZNODE.get())
            LOG.debug("Setting up Hive LLAP with the following node {0}".format(znode))
            if zk.exists(znode):
              hiveservers = zk.get_children(znode)
              for server in hiveservers:
                # Child names look like "...=host:port;..."; the last one wins.
                cache.set(
                  "llap",
                  json.dumps({
                    "host": server.split(';')[0].split('=')[1].split(":")[0],
                    "port": server.split(';')[0].split('=')[1].split(":")[1]
                  })
                )
          zk.stop()
        else:
          LOG.debug("Zookeeper discovery not enabled, reverting to config values")
          cache.set("llap", json.dumps({"host": LLAP_SERVER_HOST.get(), "port": LLAP_SERVER_THRIFT_PORT.get()}), CACHE_TIMEOUT.get())

      # NOTE(review): if HA discovery ran but no instance had api == "activeEndpoint",
      # the cache may still be empty here and json.loads(None) would raise — confirm.
      activeEndpoint = json.loads(cache.get("llap"))

    elif name != 'hms' and name != 'impala':
      activeEndpoint = cache.get("hiveserver2")
      if activeEndpoint is None:
        if HIVE_DISCOVERY_HS2.get():
          hiveservers = get_zk_hs2()
          LOG.debug("Available Hive Servers: {0}".format(hiveservers))
          if not hiveservers:
            raise PopupException(_('There is no running Hive server available'))
          server_to_use = 0
          LOG.debug("Selected Hive server {0}: {1}".format(server_to_use, hiveservers[server_to_use]))
          cache.set(
            "hiveserver2",
            json.dumps({
              "host": hiveservers[server_to_use].split(";")[0].split("=")[1].split(":")[0],
              "port": hiveservers[server_to_use].split(";")[0].split("=")[1].split(":")[1]
            })
          )
        else:
          cache.set("hiveserver2", json.dumps({"host": HIVE_SERVER_HOST.get(), "port": HIVE_HTTP_THRIFT_PORT.get()}))
      else:
        # Setting hs2 cache in-case there is no HS2 discovery
        cache.set("hiveserver2", json.dumps({"host": HIVE_SERVER_HOST.get(), "port": HIVE_HTTP_THRIFT_PORT.get()}))
        if HIVE_DISCOVERY_HS2.get():
          # Replace ActiveEndpoint if the current HS2 is down
          hiveservers = get_zk_hs2()
          if hiveservers is not None:
            server_to_use = 0
            hs2_host_name = hiveservers[server_to_use].split(";")[0].split("=")[1].split(":")[0]
            # NOTE(review): activeEndpoint is still the cached JSON *string* here,
            # so this is a substring check against the serialized payload — confirm intended.
            hs2_in_active_endpoint = hs2_host_name in activeEndpoint
            LOG.debug("Is the current HS2 active {0}".format(hs2_in_active_endpoint))
            if not hs2_in_active_endpoint:
              LOG.error(
                'Current HiveServer is down, working to connect with the next available HiveServer from Zookeeper')
              reset_ha()
              server_to_use = 0
              LOG.debug("Selected HiveServer {0}: {1}".format(server_to_use, hiveservers[server_to_use]))
              cache.set(
                "hiveserver2",
                json.dumps({
                  "host": hiveservers[server_to_use].split(";")[0].split("=")[1].split(":")[0],
                  "port": hiveservers[server_to_use].split(";")[0].split("=")[1].split(":")[1]
                })
              )

      activeEndpoint = json.loads(cache.get("hiveserver2"))

    if name == 'impala':
      from impala.dbms import get_query_server_config as impala_query_server_config
      query_server = impala_query_server_config()
    elif name == 'hms':
      kerberos_principal = get_hiveserver2_kerberos_principal(HIVE_SERVER_HOST.get())
      query_server = {
          'server_name': 'hms',
          # Bug fix: this branch previously read `cluster_config`, a name that is
          # never defined in this function (copied from an older variant), which
          # raised NameError whenever name == 'hms'. Use the configured host directly.
          'server_host': HIVE_METASTORE_HOST.get(),
          'server_port': HIVE_METASTORE_PORT.get(),
          'principal': kerberos_principal,
          'transport_mode': 'http' if hiveserver2_transport_mode() == 'HTTP' else 'socket',
          'auth_username': AUTH_USERNAME.get(),
          'auth_password': AUTH_PASSWORD.get(),
          'use_sasl': HIVE_USE_SASL.get()
      }
    else:
      kerberos_principal = get_hiveserver2_kerberos_principal(HIVE_SERVER_HOST.get())
      query_server = {
          'server_name': 'beeswax' if name != 'hplsql' else 'hplsql',
          'server_host': activeEndpoint["host"],
          'server_port': LLAP_SERVER_PORT.get() if name == 'llap' else HIVE_SERVER_PORT.get(),
          'principal': kerberos_principal,
          'http_url': '%(protocol)s://%(host)s:%(port)s/%(end_point)s' % {
              'protocol': 'https' if hiveserver2_use_ssl() else 'http',
              'host': activeEndpoint["host"],
              'port': activeEndpoint["port"],
              'end_point': hiveserver2_thrift_http_path()
            },
          'transport_mode': 'http' if hiveserver2_transport_mode() == 'HTTP' else 'socket',
          'auth_username': AUTH_USERNAME.get(),
          'auth_password': AUTH_PASSWORD.get(),
          'use_sasl': HIVE_USE_SASL.get(),
          'close_sessions': CLOSE_SESSIONS.get(),
          'has_session_pool': has_session_pool(),
          'max_number_of_sessions': MAX_NUMBER_OF_SESSIONS.get()
        }

    if name == 'sparksql':  # Extends Hive as very similar
      from spark.conf import SQL_SERVER_HOST as SPARK_SERVER_HOST, SQL_SERVER_PORT as SPARK_SERVER_PORT, USE_SASL as SPARK_USE_SASL

      query_server.update({
          'server_name': 'sparksql',
          'server_host': SPARK_SERVER_HOST.get(),
          'server_port': SPARK_SERVER_PORT.get(),
          'use_sasl': SPARK_USE_SASL.get()
      })

  # Default the dialect to the server name when the connector did not set one.
  if not query_server.get('dialect'):
    query_server['dialect'] = query_server['server_name']

  # Log the resolved configuration without exposing the raw password.
  debug_query_server = query_server.copy()
  debug_query_server['auth_password_used'] = bool(debug_query_server.pop('auth_password', None))
  LOG.debug("Query Server: %s" % debug_query_server)

  return query_server
Exemplo n.º 22
0
def get_query_server_config(name='beeswax', server=None, cluster=None):
    """Return the connection settings for `name`, honoring a remote cluster.

    Args:
        name: 'impala', 'sparksql', or HiveServer2 ('beeswax') otherwise.
        server: Unused; kept for call-site compatibility.
        cluster: Optional cluster descriptor; a non-local id switches the
            target host to that cluster's endpoint.

    Returns:
        A dict of host/port/auth settings for the client layer.
    """
    LOG.debug("Query cluster: %s" % cluster)

    cluster_config = None
    if cluster and cluster.get('id') != CLUSTER_ID.get():
        if 'altus:dataware:k8s' in cluster['id']:
            # TODO getting list from left assist; TODO get port too
            end_point = cluster['compute_end_point']
            if type(end_point) == list:
                end_point = end_point[0]
            cluster_config = {'server_host': end_point, 'name': cluster['name']}
        else:
            cluster_config = Cluster(user=None).get_config(cluster['id'])  # Direct cluster

    if name == 'impala':
        from impala.dbms import get_query_server_config as impala_query_server_config
        query_server = impala_query_server_config(cluster_config=cluster_config)
    else:
        local_hive_host = HIVE_SERVER_HOST.get()
        kerberos_principal = hive_site.get_hiveserver2_kerberos_principal(local_hive_host)
        target_host = local_hive_host if not cluster_config else cluster_config.get('server_host')

        query_server = {
            'server_name': 'beeswax',
            'server_host': target_host,
            'server_port': HIVE_SERVER_PORT.get(),
            'principal': kerberos_principal,
            'http_url': '%(protocol)s://%(host)s:%(port)s/%(end_point)s' % {
                'protocol': 'https' if hiveserver2_use_ssl() else 'http',
                'host': local_hive_host,
                'port': hive_site.hiveserver2_thrift_http_port(),
                'end_point': hive_site.hiveserver2_thrift_http_path()
            },
            'transport_mode': 'http' if hive_site.hiveserver2_transport_mode() == 'HTTP' else 'socket',
            'auth_username': AUTH_USERNAME.get(),
            'auth_password': AUTH_PASSWORD.get()
        }

    if name == 'sparksql':  # Spark SQL is almost the same as Hive
        from spark.conf import SQL_SERVER_HOST as SPARK_SERVER_HOST, SQL_SERVER_PORT as SPARK_SERVER_PORT

        query_server.update({
            'server_name': 'sparksql',
            'server_host': SPARK_SERVER_HOST.get(),
            'server_port': SPARK_SERVER_PORT.get()
        })

    # Log the resolved configuration, replacing the password with a boolean flag.
    debug_query_server = query_server.copy()
    debug_query_server['auth_password_used'] = bool(debug_query_server.pop('auth_password'))
    LOG.debug("Query Server: %s" % debug_query_server)

    return query_server
Exemplo n.º 23
0
def get_query_server_config(name='beeswax', server=None, cluster=None):
    """Resolve connection settings for `name`, with optional ZooKeeper discovery.

    For 'llap' and plain HiveServer2 the endpoint is looked up in the cache
    first; on a miss it is discovered via ZooKeeper (when enabled) or taken
    from the configured values, then cached. 'impala' and 'hms' are resolved
    directly from their own configuration.

    Args:
        name: 'beeswax', 'llap', 'hms', 'impala' or 'sparksql'.
        server: Unused; kept for call-site compatibility.
        cluster: Optional cluster descriptor passed to get_cluster_config().

    Returns:
        A dict of host/port/auth settings for the client layer.
    """
    LOG.debug("Query cluster %s: %s" % (name, cluster))

    cluster_config = get_cluster_config(cluster)

    if name == "llap":
        activeEndpoint = cache.get('llap')
        if activeEndpoint is None:
            if HIVE_DISCOVERY_LLAP.get():
                LOG.debug("Checking zookeeper for Hive Server Interactive endpoint")
                zk = KazooClient(hosts=libzookeeper_conf.ENSEMBLE.get(), read_only=True)
                zk.start()
                if HIVE_DISCOVERY_LLAP_HA.get():
                    znode = "{0}/instances".format(HIVE_DISCOVERY_LLAP_ZNODE.get())
                    LOG.debug("Setting up LLAP with the following node {0}".format(znode))
                    if zk.exists(znode):
                        # Cache the endpoint of the instance flagged as the active one.
                        for child in zk.get_children(znode):
                            llap_servers = json.loads(zk.get("{0}/{1}".format(znode, child))[0])["internal"][0]
                            if llap_servers["api"] == "activeEndpoint":
                                cache.set(
                                    "llap",
                                    json.dumps({
                                        "host": llap_servers["addresses"][0]["host"],
                                        "port": llap_servers["addresses"][0]["port"]
                                    }),
                                    CACHE_TIMEOUT.get())
                    else:
                        LOG.error("LLAP Endpoint not found, reverting to HiveServer2")
                        cache.set(
                            "llap",
                            json.dumps({
                                "host": HIVE_SERVER_HOST.get(),
                                "port": hive_site.hiveserver2_thrift_http_port()
                            }),
                            CACHE_TIMEOUT.get())
                else:
                    znode = "{0}".format(HIVE_DISCOVERY_LLAP_ZNODE.get())
                    LOG.debug("Setting up LLAP with the following node {0}".format(znode))
                    if zk.exists(znode):
                        # Child names look like "...=host:port;..."; the last one wins.
                        for child in zk.get_children(znode):
                            host_and_port = child.split(';')[0].split('=')[1].split(":")
                            cache.set(
                                "llap",
                                json.dumps({
                                    "host": host_and_port[0],
                                    "port": host_and_port[1]
                                }))
                zk.stop()
            else:
                LOG.debug("Zookeeper Discovery not enabled, reverting to config values")
                cache.set(
                    "llap",
                    json.dumps({
                        "host": LLAP_SERVER_HOST.get(),
                        "port": LLAP_SERVER_THRIFT_PORT.get()
                    }),
                    CACHE_TIMEOUT.get())
        activeEndpoint = json.loads(cache.get("llap"))
    elif name != 'hms' and name != 'impala':
        activeEndpoint = cache.get("hiveserver2")
        if activeEndpoint is None:
            if HIVE_DISCOVERY_HS2.get():
                zk = KazooClient(hosts=libzookeeper_conf.ENSEMBLE.get(), read_only=True)
                zk.start()
                znode = HIVE_DISCOVERY_HIVESERVER2_ZNODE.get()
                LOG.info("Setting up Hive with the following node {0}".format(znode))
                if zk.exists(znode):
                    hiveservers = zk.get_children(znode)
                    server_to_use = 0  # if CONF.HIVE_SPREAD.get() randint(0, len(hiveservers)-1) else 0
                    host_and_port = hiveservers[server_to_use].split(";")[0].split("=")[1].split(":")
                    cache.set(
                        "hiveserver2",
                        json.dumps({
                            "host": host_and_port[0],
                            "port": host_and_port[1]
                        }))
                else:
                    cache.set(
                        "hiveserver2",
                        json.dumps({
                            "host": HIVE_SERVER_HOST.get(),
                            "port": hive_site.hiveserver2_thrift_http_port()
                        }))
                zk.stop()
            else:
                cache.set(
                    "hiveserver2",
                    json.dumps({
                        "host": HIVE_SERVER_HOST.get(),
                        "port": hive_site.hiveserver2_thrift_http_port()
                    }))
        activeEndpoint = json.loads(cache.get("hiveserver2"))

    if name == 'impala':
        from impala.dbms import get_query_server_config as impala_query_server_config
        query_server = impala_query_server_config(cluster_config=cluster_config)
    elif name == 'hms':
        kerberos_principal = hive_site.get_hiveserver2_kerberos_principal(HIVE_SERVER_HOST.get())
        query_server = {
            'server_name': 'hms',
            'server_host': HIVE_METASTORE_HOST.get() if not cluster_config else cluster_config.get('server_host'),
            'server_port': HIVE_METASTORE_PORT.get(),
            'principal': kerberos_principal,
            'transport_mode': 'http' if hive_site.hiveserver2_transport_mode() == 'HTTP' else 'socket',
            'auth_username': AUTH_USERNAME.get(),
            'auth_password': AUTH_PASSWORD.get()
        }
    else:
        kerberos_principal = hive_site.get_hiveserver2_kerberos_principal(HIVE_SERVER_HOST.get())
        query_server = {
            'server_name': 'beeswax',
            'server_host': activeEndpoint["host"],
            'server_port': LLAP_SERVER_PORT.get() if name == 'llap' else HIVE_SERVER_PORT.get(),
            'principal': kerberos_principal,
            'http_url': '%(protocol)s://%(host)s:%(port)s/%(end_point)s' % {
                'protocol': 'https' if hiveserver2_use_ssl() else 'http',
                'host': activeEndpoint["host"],
                'port': activeEndpoint["port"],
                'end_point': hive_site.hiveserver2_thrift_http_path()
            },
            'transport_mode': 'http' if hive_site.hiveserver2_transport_mode() == 'HTTP' else 'socket',
            'auth_username': AUTH_USERNAME.get(),
            'auth_password': AUTH_PASSWORD.get()
        }
    if name == 'sparksql':  # Spark SQL is almost the same as Hive
        from spark.conf import SQL_SERVER_HOST as SPARK_SERVER_HOST, SQL_SERVER_PORT as SPARK_SERVER_PORT

        query_server.update({
            'server_name': 'sparksql',
            'server_host': SPARK_SERVER_HOST.get(),
            'server_port': SPARK_SERVER_PORT.get()
        })

    # Log the resolved configuration, replacing the password with a boolean flag.
    debug_query_server = query_server.copy()
    debug_query_server['auth_password_used'] = bool(debug_query_server.pop('auth_password'))
    LOG.debug("Query Server: %s" % debug_query_server)

    return query_server