Example #1
    def create_session(self, lang='scala', properties=None):
        if not properties and USE_DEFAULT_CONFIGURATION.get():
            user_config = DefaultConfiguration.objects.get_configuration_for_user(
                app='spark', user=self.user)
            if user_config is not None:
                properties = user_config.properties_list

        props = self.get_livy_props(lang, properties)

        api = get_spark_api(self.user)
        response = api.create_session(**props)

        status = api.get_session(response['id'])
        count = 0

        while status['state'] == 'starting' and count < 120:
            status = api.get_session(response['id'])
            count += 1
            time.sleep(1)

        if status['state'] != 'idle':
            info = '\n'.join(status['log']) if status['log'] else 'timeout'
            raise QueryError(
                _('The Spark session is %s and could not be created in the cluster: %s'
                  ) % (status['state'], info))

        return {
            'type': lang,
            'id': response['id'],
            'properties': self.to_properties(props)
        }
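
The polling loop in this variant (and in the later Spark examples #6, #7 and #9) boils down to: ask Livy for the session state once a second, stop after roughly 120 polls, and let the caller report the session log on failure. A minimal sketch of that logic as a standalone helper, assuming only the api.get_session(id) call used above; the helper name and the max_polls parameter are illustrative, not part of the original code:

import time

def wait_until_not_starting(api, session_id, max_polls=120):
    # Poll the Livy session once per second until it leaves the 'starting'
    # state or the poll budget runs out, mirroring the loop above.
    status = api.get_session(session_id)
    polls = 0
    while status['state'] == 'starting' and polls < max_polls:
        time.sleep(1)
        status = api.get_session(session_id)
        polls += 1
    return status

The caller would then raise QueryError when the returned state is not 'idle', exactly as Example #1 does with the joined session log.
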
Example #2
    def create_session(self, lang='hive', properties=None):
        application = 'beeswax' if lang == 'hive' else lang

        session = Session.objects.get_session(self.user,
                                              application=application)

        if session is None:
            session = dbms.get(self.user,
                               query_server=get_query_server_config(
                                   name=lang)).open_session(self.user)

        response = {'type': lang, 'id': session.id}

        if not properties:

            config = None
            if USE_DEFAULT_CONFIGURATION.get():
                config = DefaultConfiguration.objects.get_configuration_for_user(
                    app=lang, user=self.user)

            if config is not None:
                properties = config.properties_list
            else:
                properties = self.get_properties(lang)

        response['properties'] = properties

        if lang == 'impala':
            http_addr = self._get_impala_server_url(session)
            response['http_addr'] = http_addr

        return response
Example #3
  def create_session(self, lang='hive', properties=None):
    application = 'beeswax' if lang == 'hive' else lang

    session = Session.objects.get_session(self.user, application=application)

    if session is None:
      session = dbms.get(self.user, query_server=get_query_server_config(name=lang)).open_session(self.user)

    response = {
      'type': lang,
      'id': session.id
    }

    if not properties:

      config = None
      if USE_DEFAULT_CONFIGURATION.get():
        config = DefaultConfiguration.objects.get_configuration_for_user(app=lang, user=self.user)

      if config is not None:
        properties = config.properties_list
      else:
        properties = self.get_properties(lang)

    response['properties'] = properties

    if lang == 'impala':
      impala_settings = session.get_formatted_properties()
      http_addr = next((setting['value'] for setting in impala_settings if setting['key'].lower() == 'http_addr'), None)
      response['http_addr'] = http_addr

    return response
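
Examples #2, #5 and #8 delegate the Impala coordinator lookup to a _get_impala_server_url helper (as a method or module-level function); this example inlines it. A sketch of such a helper, reconstructed from the inline version above (the real helper in those examples may differ):

def _get_impala_server_url(session):
    # Pull the 'http_addr' entry out of the session's formatted properties,
    # exactly as the inline lookup above does; returns None when absent.
    impala_settings = session.get_formatted_properties()
    return next(
        (setting['value'] for setting in impala_settings if setting['key'].lower() == 'http_addr'),
        None)
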
Example #4
    def create_session(self, lang='hive', properties=None):
        application = 'beeswax' if lang == 'hive' else lang

        session = Session.objects.get_session(self.user,
                                              application=application)

        reuse_session = session is not None
        if not reuse_session:
            session = dbms.get(self.user,
                               query_server=get_query_server_config(
                                   name=lang)).open_session(self.user)

        response = {'type': lang, 'id': session.id}

        if not properties:
            config = None
            if USE_DEFAULT_CONFIGURATION.get():
                config = DefaultConfiguration.objects.get_configuration_for_user(
                    app=lang, user=self.user)

            if config is not None:
                properties = config.properties_list
            else:
                properties = self.get_properties(lang)

        response['properties'] = properties
        response['reuse_session'] = reuse_session
        response['session_id'] = ''

        try:
            decoded_guid = session.get_handle().sessionId.guid
            response['session_id'] = "%x:%x" % struct.unpack(
                b"QQ", decoded_guid)
        except Exception as e:
            LOG.warn('Failed to decode session handle: %s' % e)

        return response
Example #5
    def create_session(self, lang='hive', properties=None):
        application = 'beeswax' if lang == 'hive' or lang == 'llap' else lang

        if has_session_pool():
            session = Session.objects.get_tez_session(
                self.user, application, MAX_NUMBER_OF_SESSIONS.get())
        elif not has_multiple_sessions():
            session = Session.objects.get_session(self.user,
                                                  application=application)
        else:
            session = None

        reuse_session = session is not None
        if not reuse_session:
            db = dbms.get(self.user,
                          query_server=get_query_server_config(
                              name=lang, connector=self.interpreter))
            session = db.open_session(self.user)

        response = {'type': lang, 'id': session.id}

        if not properties:
            config = None
            if USE_DEFAULT_CONFIGURATION.get():
                config = DefaultConfiguration.objects.get_configuration_for_user(
                    app=lang, user=self.user)

            if config is not None:
                properties = config.properties_list
            else:
                properties = self.get_properties(lang)

        response['properties'] = properties
        response['configuration'] = json.loads(session.properties)
        response['reuse_session'] = reuse_session
        response['session_id'] = ''

        try:
            decoded_guid = session.get_handle().sessionId.guid
            response['session_id'] = unpack_guid(decoded_guid)
        except Exception as e:
            LOG.warn('Failed to decode session handle: %s' % e)

        if lang == 'impala' and session:
            http_addr = _get_impala_server_url(session)
            response['http_addr'] = http_addr

        return response
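
Examples #4, #5 and #8 all expose the Thrift session handle; #5 and #8 format it with an unpack_guid() helper, while Example #4 does the same inline with struct.unpack. A minimal sketch of such a helper, reconstructed from that inline call (the actual implementation may live elsewhere):

import struct

def unpack_guid(guid):
    # Split the 16-byte session GUID into two unsigned 64-bit integers and
    # render them as the same "%x:%x" hex pair Example #4 builds inline.
    return "%x:%x" % struct.unpack(b"QQ", guid)
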
Example #6
  def create_session(self, lang='scala', properties=None):
    if not properties:
      config = None
      if USE_DEFAULT_CONFIGURATION.get():
        config = DefaultConfiguration.objects.get_configuration_for_user(app='spark', user=self.user)

      if config is not None:
        properties = config.properties_list
      else:
        properties = self.get_properties()

    props = dict([(p['name'], p['value']) for p in properties]) if properties is not None else {}

    props['kind'] = lang

    api = get_spark_api(self.user)

    response = api.create_session(**props)

    status = api.get_session(response['id'])
    count = 0

    while status['state'] == 'starting' and count < 120:
      status = api.get_session(response['id'])
      count += 1
      time.sleep(1)

    if status['state'] != 'idle':
      info = '\n'.join(status['log']) if status['log'] else 'timeout'
      raise QueryError(_('The Spark session could not be created in the cluster: %s') % info)

    return {
        'type': lang,
        'id': response['id'],
        'properties': properties
    }
Example #7
    def create_session(self, lang='scala', properties=None):
        if not properties:
            config = None
            if USE_DEFAULT_CONFIGURATION.get():
                config = DefaultConfiguration.objects.get_configuration_for_user(
                    app='spark', user=self.user)

            if config is not None:
                properties = config.properties_list
            else:
                properties = self.get_properties()

        props = dict([(p['name'], p['value'])
                      for p in properties]) if properties is not None else {}

        # HUE-4761: Hue's session request is causing Livy to fail with "JsonMappingException: Can not deserialize
        # instance of scala.collection.immutable.List out of VALUE_STRING token" due to List type values
        # not being formed properly, they are quoted csv strings (without brackets) instead of proper List
        # types, this is for keys; archives, jars, files and pyFiles. The Mako frontend probably should be
        # modified to pass the values as Livy expects but for now we coerce these types to be Lists.
        # Issue only occurs when non-default values are used because the default path properly sets the
        # empty list '[]' for these four values.
        # Note also that Livy has a 90 second timeout for the session request to complete, this needs to
        # be increased for requests that take longer, for example when loading large archives.
        tmparchives = props['archives']
        if type(tmparchives) is not list:
            props['archives'] = tmparchives.split(",")
            LOG.debug("Check List type: archives was not a list")

        tmpjars = props['jars']
        if type(tmpjars) is not list:
            props['jars'] = tmpjars.split(",")
            LOG.debug("Check List type: jars was not a list")

        tmpfiles = props['files']
        if type(tmpfiles) is not list:
            props['files'] = tmpfiles.split(",")
            LOG.debug("Check List type: files was not a list")

        tmppyFiles = props['pyFiles']
        if type(tmppyFiles) is not list:
            props['pyFiles'] = tmppyFiles.split(",")
            LOG.debug("Check List type: pyFiles was not a list")

        # Convert the conf list to a dict for Livy
        listitems = props['conf']
        LOG.debug("Property Spark Conf kvp list from UI is: " + str(listitems))
        confDict = {}
        for i in range(len(listitems)):
            kvp = listitems[i]
            LOG.debug("Property Spark Conf key " + str(i) + " = " +
                      str(kvp.get('key')))
            LOG.debug("Property Spark Conf value " + str(i) + " = " +
                      str(kvp.get('value')))
            confDict[kvp.get('key')] = kvp.get('value')
        props['conf'] = confDict
        LOG.debug("Property Spark Conf dictionary is: " + str(confDict))

        props['kind'] = lang

        api = get_spark_api(self.user)

        response = api.create_session(**props)

        status = api.get_session(response['id'])
        count = 0

        while status['state'] == 'starting' and count < 120:
            status = api.get_session(response['id'])
            count += 1
            time.sleep(1)

        if status['state'] != 'idle':
            info = '\n'.join(status['log']) if status['log'] else 'timeout'
            raise QueryError(
                _('The Spark session could not be created in the cluster: %s')
                % info)

        return {'type': lang, 'id': response['id'], 'properties': properties}
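
The HUE-4761 workaround above (coercing quoted CSV strings back into lists and turning the UI's conf key/value pairs into a dict) could also be written as a single normalization pass over the Livy property dict. A sketch under the same assumptions about the incoming shapes; the helper name is illustrative and not part of the original code:

def normalize_livy_props(props):
    # Coerce the four list-typed keys (archives, jars, files, pyFiles) from
    # comma-separated strings into real lists, as the inline checks above do.
    for key in ('archives', 'jars', 'files', 'pyFiles'):
        value = props.get(key)
        if value is not None and not isinstance(value, list):
            props[key] = value.split(",")

    # Convert the UI's [{'key': ..., 'value': ...}] conf list into the plain
    # dict that Livy expects.
    conf = props.get('conf')
    if isinstance(conf, list):
        props['conf'] = dict((item.get('key'), item.get('value')) for item in conf)

    return props
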
Example #8
    def create_session(self, lang='hive', properties=None):
        application = 'beeswax' if lang == 'hive' or lang == 'llap' else lang

        uses_session_pool = has_session_pool()
        uses_multiple_sessions = has_multiple_sessions()

        if lang == 'impala':
            uses_session_pool = False
            uses_multiple_sessions = False

        # Pre-set so a swallowed connection error below falls through to
        # opening a new session instead of leaving 'session' unbound.
        session = None
        try:
            if uses_session_pool:
                session = Session.objects.get_tez_session(
                    self.user, application, MAX_NUMBER_OF_SESSIONS.get())
            elif not uses_multiple_sessions:
                session = Session.objects.get_session(self.user,
                                                      application=application)
        except Exception as e:
            if 'Connection refused' in str(e) or 'Name or service not known' in str(e):
                LOG.exception(
                    'Connection being refused or service is not available in either session or in multiple sessions'
                    ' - HA failover')
                reset_ha()

        reuse_session = session is not None
        if not reuse_session:
            db = dbms.get(self.user,
                          query_server=get_query_server_config(
                              name=lang, connector=self.interpreter))
            try:
                session = db.open_session(self.user)
            except Exception as e:
                if 'Connection refused' in str(e) or 'Name or service not known' in str(e):
                    LOG.exception(
                        'Connection being refused or service is not available in reuse session - HA failover')
                    reset_ha()

        response = {'type': lang, 'id': session.id}

        if not properties:
            config = None
            if USE_DEFAULT_CONFIGURATION.get():
                config = DefaultConfiguration.objects.get_configuration_for_user(
                    app=lang, user=self.user)

            if config is not None:
                properties = config.properties_list
            else:
                properties = self.get_properties(lang)

        response['properties'] = properties
        response['configuration'] = json.loads(session.properties)
        response['reuse_session'] = reuse_session
        response['session_id'] = ''

        try:
            decoded_guid = session.get_handle().sessionId.guid
            response['session_id'] = unpack_guid(decoded_guid)
        except Exception as e:
            LOG.warning('Failed to decode session handle: %s' % e)

        if lang == 'impala' and session:
            http_addr = _get_impala_server_url(session)
            response['http_addr'] = http_addr

        return response
Example #9
  def create_session(self, lang='scala', properties=None):
    if not properties:
      config = None
      if USE_DEFAULT_CONFIGURATION.get():
        config = DefaultConfiguration.objects.get_configuration_for_user(app='spark', user=self.user)

      if config is not None:
        properties = config.properties_list
      else:
        properties = self.get_properties()

    props = dict([(p['name'], p['value']) for p in properties]) if properties is not None else {}


    # HUE-4761: Hue's session request is causing Livy to fail with "JsonMappingException: Can not deserialize
    # instance of scala.collection.immutable.List out of VALUE_STRING token" due to List type values
    # not being formed properly, they are quoted csv strings (without brackets) instead of proper List
    # types, this is for keys; archives, jars, files and pyFiles. The Mako frontend probably should be
    # modified to pass the values as Livy expects but for now we coerce these types to be Lists.
    # Issue only occurs when non-default values are used because the default path properly sets the
    # empty list '[]' for these four values.
    # Note also that Livy has a 90 second timeout for the session request to complete, this needs to
    # be increased for requests that take longer, for example when loading large archives.
    tmparchives = props['archives']
    if type(tmparchives) is not list:
      props['archives'] = tmparchives.split(",")
      LOG.debug("Check List type: archives was not a list")

    tmpjars = props['jars']
    if type(tmpjars) is not list:
      props['jars'] = tmpjars.split(",")
      LOG.debug("Check List type: jars was not a list")

    tmpfiles = props['files']
    if type(tmpfiles) is not list:
      props['files'] = tmpfiles.split(",")
      LOG.debug("Check List type: files was not a list")

    tmppyFiles = props['pyFiles']
    if type(tmppyFiles) is not list:
      props['pyFiles'] = tmppyFiles.split(",")
      LOG.debug("Check List type: pyFiles was not a list")


    props['kind'] = lang

    api = get_spark_api(self.user)

    response = api.create_session(**props)

    status = api.get_session(response['id'])
    count = 0

    while status['state'] == 'starting' and count < 120:
      status = api.get_session(response['id'])
      count += 1
      time.sleep(1)

    if status['state'] != 'idle':
      info = '\n'.join(status['log']) if status['log'] else 'timeout'
      raise QueryError(_('The Spark session could not be created in the cluster: %s') % info)

    return {
        'type': lang,
        'id': response['id'],
        'properties': properties
    }