Example #1
File: views.py Project: uk0/hue
def index(request):
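  # Hue 4 is the default UI when the IS_HUE_4 flag is enabled or Hue 3 is disabled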
  is_hue_4 = IS_HUE_4.get() or DISABLE_HUE_3.get()
  if is_hue_4:
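    # A per-user 'hue_version' preference can switch back to Hue 3, unless Hue 3 is disabled globally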
    try:
      user_hue_version = json.loads(UserPreferences.objects.get(user=request.user, key='hue_version').value)
      is_hue_4 = user_hue_version >= 4 or DISABLE_HUE_3.get()
    except UserPreferences.DoesNotExist:
      pass

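  # Superusers without the 'home' landing-page cookie are sent to the About page first (Hue 3 only)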
  if request.user.is_superuser and request.COOKIES.get('hueLandingPage') != 'home' and not IS_HUE_4.get():
    return redirect(reverse('about:index'))
  else:
    if is_hue_4:
      return redirect('desktop.views.hue')
    elif USE_NEW_EDITOR.get():
      return redirect('desktop.views.home2')
    else:
      return home(request)
Example #2
    def run(self,
            request,
            collection_name,
            envelope,
            input_path,
            start_time=None,
            lib_path=None):
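        # _upload_workspace() is assumed to stage envelope.conf in an HDFS workspace and return its path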
        workspace_path = self._upload_workspace(envelope)

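        # Build a notebook that runs the indexing job as a background task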
        task = make_notebook(
            name=_('Indexing into %s') % collection_name,
            editor_type='notebook',
            #on_success_url=reverse('search:browse', kwargs={'name': collection_name}),
            #pub_sub_url='assist.collections.refresh',
            is_task=True,
            is_notebook=True,
            last_executed=start_time)

        if not DISABLE_HUE_3.get():  # CDH5
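            # Hue 3 still enabled: wrap the Envelope job in a shell script that calls spark2-submit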
            shell_command_name = "pipeline.sh"
            shell_command = """#!/bin/bash

SPARK_KAFKA_VERSION=0.10 spark2-submit envelope.jar envelope.conf"""
            hdfs_shell_cmd_path = os.path.join(workspace_path,
                                               shell_command_name)
            self.fs.do_as_user(self.username,
                               self.fs.create,
                               hdfs_shell_cmd_path,
                               data=shell_command)
            task.add_shell_snippet(
                shell_command=shell_command_name,
                files=[
                    {u'value': u'%s/envelope.conf' % workspace_path},
                    {u'value': hdfs_shell_cmd_path},
                    {u'value': lib_path},
                ])
        else:
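            # Hue 3 disabled (Hue 4): submit the Envelope jar directly as a Spark snippet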
            task.add_spark_snippet(
                clazz=None,
                jars=lib_path,
                arguments=[u'envelope.conf'],
                files=[{
                    u'path': u'%s/envelope.conf' % workspace_path,
                    u'type': u'file'
                }])

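        # Submit the notebook for execution as a batch job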
        return task.execute(request, batch=True)