Example #1
def _start_mini_hs2(cluster):
    HIVE_CONF = cluster.hadoop_conf_dir
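    # Override Beeswax settings for the test; each set_for_testing() call returns a reset callable, invoked in the teardown below.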
    finish = (
        beeswax.conf.HIVE_SERVER_HOST.set_for_testing(get_localhost_name()),
        beeswax.conf.HIVE_SERVER_PORT.set_for_testing(HIVE_SERVER_TEST_PORT),
        beeswax.conf.HIVE_SERVER_BIN.set_for_testing(
            get_run_root('ext/hive/hive') + '/bin/hiveserver2'),
        beeswax.conf.HIVE_CONF_DIR.set_for_testing(HIVE_CONF))

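    # Minimal hive-site.xml pointing the metastore at an embedded Derby database under the cluster's temp dir.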
    default_xml = """<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>

<configuration>

<property>
 <name>javax.jdo.option.ConnectionURL</name>
 <value>jdbc:derby:;databaseName=%(root)s/metastore_db;create=true</value>
 <description>JDBC connect string for a JDBC metastore</description>
</property>

<property>
  <name>hive.server2.enable.impersonation</name>
  <value>false</value>
</property>

<property>
 <name>hive.querylog.location</name>
 <value>%(querylog)s</value>
</property>

</configuration>
""" % {
        'root': cluster._tmpdir,
        'querylog': cluster.log_dir + '/hive'
    }

    with open(HIVE_CONF + '/hive-site.xml', 'w') as hive_site:
        hive_site.write(default_xml)

    global _SHARED_HIVE_SERVER_PROCESS

    if _SHARED_HIVE_SERVER_PROCESS is None:
        p = _start_server(cluster)
        LOG.info("started")
        cluster.fs.do_as_superuser(cluster.fs.chmod, '/tmp', 0o1777)

        _SHARED_HIVE_SERVER_PROCESS = p

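        # Kill the spawned HiveServer2 process at interpreter exit so it cannot outlive the test run.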
        def kill():
            LOG.info("Killing server (pid %d)." % p.pid)
            os.kill(p.pid, 9)
            p.wait()

        atexit.register(kill)

    def s():
        for f in finish:
            f()
        cluster.stop()

    return s
Example #2
  def process_request(self, request):
    user = request.user

    if not user or not user.is_authenticated:
      return

    profile = get_profile(user)
    expires_after = AUTH.IDLE_SESSION_TIMEOUT.get()
    now = datetime.now()
    logout = False

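    # Flag the session for logout once the idle timeout has elapsed since the last recorded activity.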
    if profile.last_activity and expires_after > 0 and self._total_seconds(now - profile.last_activity) > expires_after:
      logout = True

    # Save last activity for user except when polling
    if not (request.path.strip('/') == 'notebook/api/check_status') \
        and not (request.path.strip('/').startswith('jobbrowser/api/job')) \
        and not (request.path.strip('/') == 'jobbrowser/jobs' and request.POST.get('format') == 'json') \
        and not (request.path.strip('/') == 'desktop/debug/is_idle') \
        and not (request.path.strip('/').startswith('oozie/list_oozie_')):
      try:
        profile.last_activity = datetime.now()
        profile.hostname = get_localhost_name()
        profile.save()
      except DatabaseError:
        LOG.exception('Error saving profile information')

    if logout:
      dt_logout(request, next_page='/')
Example #3
def _start_mini_hs2(cluster):
    HIVE_CONF = cluster.hadoop_conf_dir
    finish = (
        beeswax.conf.HIVE_SERVER_HOST.set_for_testing(get_localhost_name()),
        beeswax.conf.HIVE_SERVER_PORT.set_for_testing(HIVE_SERVER_TEST_PORT),
        beeswax.conf.HIVE_SERVER_BIN.set_for_testing(get_run_root("ext/hive/hive") + "/bin/hiveserver2"),
        beeswax.conf.HIVE_CONF_DIR.set_for_testing(HIVE_CONF),
    )

    default_xml = """<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>

<configuration>

<property>
 <name>javax.jdo.option.ConnectionURL</name>
 <value>jdbc:derby:;databaseName=%(root)s/metastore_db;create=true</value>
 <description>JDBC connect string for a JDBC metastore</description>
</property>

<property>
  <name>hive.server2.enable.impersonation</name>
  <value>false</value>
</property>

<property>
 <name>hive.querylog.location</name>
 <value>%(querylog)s</value>
</property>

</configuration>
""" % {
        "root": cluster._tmpdir,
        "querylog": cluster.log_dir + "/hive",
    }

    with open(HIVE_CONF + "/hive-site.xml", "w") as hive_site:
        hive_site.write(default_xml)

    global _SHARED_HIVE_SERVER_PROCESS

    if _SHARED_HIVE_SERVER_PROCESS is None:
        p = _start_server(cluster)
        LOG.info("started")
        cluster.fs.do_as_superuser(cluster.fs.chmod, "/tmp", 0o1777)

        _SHARED_HIVE_SERVER_PROCESS = p

        def kill():
            LOG.info("Killing server (pid %d)." % p.pid)
            os.kill(p.pid, 9)
            p.wait()

        atexit.register(kill)

    def s():
        for f in finish:
            f()
        cluster.stop()

    return s
Example #4
def active_users_per_instance():
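    # Count users whose last recorded activity happened on this host within the past hour.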
    from useradmin.models import UserProfile
    try:
        count = UserProfile.objects.filter(
            last_activity__gt=datetime.now() - timedelta(hours=1),
            hostname=get_localhost_name()).count()
    except:
        LOG.exception('Could not get active_users per instance')
        count = 0
    return count
Example #5
def get_shared_beeswax_server():
    global _SHARED_HIVE_SERVER
    global _SHARED_HIVE_SERVER_CLOSER
    if _SHARED_HIVE_SERVER is None:

        cluster = pseudo_hdfs4.shared_cluster()

        HIVE_CONF = cluster.hadoop_conf_dir
        finish = (
            beeswax.conf.HIVE_SERVER_HOST.set_for_testing(get_localhost_name()),
            beeswax.conf.HIVE_SERVER_PORT.set_for_testing(HIVE_SERVER_TEST_PORT),
            beeswax.conf.HIVE_SERVER_BIN.set_for_testing(get_run_root('ext/hive/hive') + '/bin/hiveserver2'),
            beeswax.conf.HIVE_CONF_DIR.set_for_testing(HIVE_CONF)
        )

        default_xml = """<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>

<configuration>

<property>
  <name>javax.jdo.option.ConnectionURL</name>
  <value>jdbc:derby:;databaseName=%(root)s/metastore_db;create=true</value>
  <description>JDBC connect string for a JDBC metastore</description>
</property>

 <property>
   <name>hive.server2.enable.impersonation</name>
   <value>false</value>
 </property>

<property>
  <name>hive.querylog.location</name>
  <value>%(querylog)s</value>
</property>

</configuration>
""" % {
            'root': cluster._tmpdir,
            'querylog': cluster.log_dir + '/hive'
        }

        with open(HIVE_CONF + '/hive-site.xml', 'w') as hive_site:
            hive_site.write(default_xml)

        global _SHARED_HIVE_SERVER_PROCESS

        if _SHARED_HIVE_SERVER_PROCESS is None:
            p = _start_server(cluster)
            LOG.info("started")
            cluster.fs.do_as_superuser(cluster.fs.chmod, '/tmp', 0o1777)

            _SHARED_HIVE_SERVER_PROCESS = p

            def kill():
                LOG.info("Killing server (pid %d)." % p.pid)
                os.kill(p.pid, 9)
                p.wait()

            atexit.register(kill)

            start = time.time()
            started = False
            sleep = 1

            make_logged_in_client()
            user = User.objects.get(username='******')
            query_server = get_query_server_config()
            db = dbms.get(user, query_server)

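            # Poll until HiveServer2 accepts a session, giving it up to 30 seconds to come up.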
            while not started and time.time() - start <= 30:
                try:
                    db.open_session(user)
                    started = True
                    break
                except Exception as e:
                    LOG.info(
                        'HiveServer2 server status not started yet after: %s' %
                        e)
                    time.sleep(sleep)

            if not started:
                raise Exception("Server took too long to come up.")

        def s():
            for f in finish:
                f()
            cluster.stop()

        _SHARED_HIVE_SERVER, _SHARED_HIVE_SERVER_CLOSER = cluster, s
Example #6
def get_shared_beeswax_server():
  global _SHARED_HIVE_SERVER
  global _SHARED_HIVE_SERVER_CLOSER
  if _SHARED_HIVE_SERVER is None:

    cluster = pseudo_hdfs4.shared_cluster()

    HIVE_CONF = cluster.hadoop_conf_dir
    finish = (
      beeswax.conf.HIVE_SERVER_HOST.set_for_testing(get_localhost_name()),
      beeswax.conf.HIVE_SERVER_PORT.set_for_testing(HIVE_SERVER_TEST_PORT),
      beeswax.conf.HIVE_SERVER_BIN.set_for_testing(get_run_root('ext/hive/hive') + '/bin/hiveserver2'),
      beeswax.conf.HIVE_CONF_DIR.set_for_testing(HIVE_CONF)
    )

    default_xml = """<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>

<configuration>

<property>
  <name>javax.jdo.option.ConnectionURL</name>
  <value>jdbc:derby:;databaseName=%(root)s/metastore_db;create=true</value>
  <description>JDBC connect string for a JDBC metastore</description>
</property>

 <property>
   <name>hive.server2.enable.impersonation</name>
   <value>false</value>
 </property>

<property>
  <name>hive.querylog.location</name>
  <value>%(querylog)s</value>
</property>

</configuration>
""" % {'root': cluster._tmpdir, 'querylog': cluster.log_dir + '/hive'}

    with open(HIVE_CONF + '/hive-site.xml', 'w') as hive_site:
      hive_site.write(default_xml)

    global _SHARED_HIVE_SERVER_PROCESS

    if _SHARED_HIVE_SERVER_PROCESS is None:
      p = _start_server(cluster)
      LOG.info("started")

      _SHARED_HIVE_SERVER_PROCESS = p
      def kill():
        LOG.info("Killing server (pid %d)." % p.pid)
        os.kill(p.pid, 9)
        p.wait()
      atexit.register(kill)

      start = time.time()
      started = False
      sleep = 0.001

      make_logged_in_client()
      user = User.objects.get(username='******')
      query_server = get_query_server_config()
      db = dbms.get(user, query_server)

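      # Poll with exponential backoff until HiveServer2 accepts a session or roughly 20 seconds elapse.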
      while not started and time.time() - start < 20.0:
        try:
          db.open_session(user)
          started = True
          break
        except Exception as e:
          LOG.info('HiveServer2 server status not started yet after: %s' % e)
          time.sleep(sleep)
          sleep *= 2

      if not started:
        raise Exception("Server took too long to come up.")

    def s():
      for f in finish:
        f()
      cluster.stop()

    _SHARED_HIVE_SERVER, _SHARED_HIVE_SERVER_CLOSER = cluster, s
Example #7
# limitations under the License.

import saml2
import desktop.conf
import libsaml.conf

from desktop.lib import security_util

__all__ = ['SAML_CONFIG', 'SAML_ATTRIBUTE_MAPPING', 'SAML_CREATE_UNKNOWN_USER']

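# Build the externally reachable base URL; when Hue binds to 0.0.0.0, fall back to the resolvable local hostname.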
BASE_URL = "%(protocol)s%(host)s:%(port)d" % {
    'protocol':
    desktop.conf.is_https_enabled() and 'https://' or 'http://',
    'host':
    desktop.conf.HTTP_HOST.get() == '0.0.0.0'
    and security_util.get_localhost_name() or desktop.conf.HTTP_HOST.get(),
    'port':
    desktop.conf.HTTP_PORT.get()
}

ENTITY_ID = libsaml.conf.ENTITY_ID.get().replace('<base_url>', BASE_URL)

SAML_CONFIG = {
    # full path to the xmlsec1 binary programm
    'xmlsec_binary': libsaml.conf.XMLSEC_BINARY.get(),

    # your entity id, usually your subdomain plus the url to the metadata view
    'entityid': ENTITY_ID,

    # directory with attribute mapping
    'attribute_map_dir': libsaml.conf.ATTRIBUTE_MAP_DIR.get(),
Example #8
# See the License for the specific language governing permissions and
# limitations under the License.

import saml2
import desktop.conf
import libsaml.conf

from desktop.lib import security_util


__all__ = ['SAML_CONFIG', 'SAML_ATTRIBUTE_MAPPING', 'SAML_CREATE_UNKNOWN_USER']


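# Derive the base URL from Hue's HTTP settings, replacing the 0.0.0.0 wildcard bind address with the local hostname.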
BASE_URL = "%(protocol)s%(host)s:%(port)d" % {
  'protocol': desktop.conf.is_https_enabled() and 'https://' or 'http://',
  'host':  desktop.conf.HTTP_HOST.get() == '0.0.0.0' and security_util.get_localhost_name() or desktop.conf.HTTP_HOST.get(),
  'port':  desktop.conf.HTTP_PORT.get()
}

SAML_CONFIG = {
  # full path to the xmlsec1 binary programm
  'xmlsec_binary': libsaml.conf.XMLSEC_BINARY.get(),

  # your entity id, usually your subdomain plus the url to the metadata view
  'entityid': "%s/saml2/metadata/" % BASE_URL,

  # directory with attribute mapping
  'attribute_map_dir': libsaml.conf.ATTRIBUTE_MAP_DIR.get(),

  # this block states what services we provide
  'service': {