Example 1
  def _get_params(self):
    params = {}

    if self.username != DEFAULT_USER.get(): # We impersonate if needed
      params['doAs'] = self.username
      if not self.security_enabled:
        params['user.name'] = DEFAULT_USER.get()

    return params
Example 2
    def _get_params(self):
        params = {}

        if self.username != DEFAULT_USER.get():  # We impersonate if needed
            params['doAs'] = self.username
            if not self._security_enabled:
                params['user.name'] = DEFAULT_USER.get()

        return params
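
Examples 1 and 2 build the standard Hadoop proxy-user parameters: when the effective user differs from the configured default, the request carries doAs, plus user.name on clusters without security so the proxying service can still identify itself. A minimal sketch of how such a dict typically ends up on a REST call (the build_status_url helper and the /v1/jobs endpoint below are illustrative, not part of the original code):

from urllib.parse import urlencode

def build_status_url(base_url, username, default_user, security_enabled):
    # Same check as _get_params above: only impersonate when the caller
    # is not the service's own default user.
    params = {}
    if username != default_user:
        params['doAs'] = username               # run the request as this user
        if not security_enabled:
            params['user.name'] = default_user  # identify the proxy on unsecured clusters
    query = ('?' + urlencode(params)) if params else ''
    return base_url + '/v1/jobs' + query        # '/v1/jobs' is a hypothetical endpoint

# e.g. http://oozie:11000/oozie/v1/jobs?doAs=alice&user.name=hue
print(build_status_url('http://oozie:11000/oozie', 'alice', 'hue', False))

With security enabled the user.name hint is omitted because authentication (e.g. Kerberos) already establishes the calling service.
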
Example 3
 def getuser(self, **options):
   try:
     return User.objects.get(id=1)
   except User.DoesNotExist:
     form = SuperUserChangeForm(
       {
         "username": DEFAULT_USER.get(),
         "password1": DEFAULT_USER_PASSWORD.get(),
         "password2": DEFAULT_USER_PASSWORD.get(),
         "ensure_home_directory": True,
         "is_active": True,
         "is_superuser": True,
       }
     )
     instance = form.save()
     get_profile(instance)
     return User.objects.get(username=DEFAULT_USER.get())
Example 4
def extract_archive_in_hdfs(request, upload_path, file_name):

  _upload_extract_archive_script_to_hdfs(request.fs)

  shell_notebook = Notebook()
  shell_notebook.add_shell_snippet(
      shell_command='extract_archive_in_hdfs.sh',
      arguments=[{'value': '-u=' + upload_path}, {'value': '-f=' + file_name}],
      archives=[],
      files=[{'value': '/user/' + DEFAULT_USER.get() + '/common/extract_archive_in_hdfs.sh'}, {"value": upload_path + '/' + file_name}],
      env_var=[{'value': 'HADOOP_USER_NAME=${wf:user()}'}])
  return shell_notebook.execute(request, batch=True)
Example 5
    def handle_noargs(self, **options):
        try:
            user = User.objects.get(username=DEFAULT_USER.get())
        except User.DoesNotExist:
            from useradmin.management.commands.create_sandbox_user import Command
            Command().handle_noargs()
            user = User.objects.get(username=DEFAULT_USER.get())
        error_list = _get_config_errors(user)
        error_message = ""
        for confvar, error in error_list:
            if isinstance(confvar, str):
                cv = confvar
            else:
                cv = confvar.get_fully_qualifying_key()
            error_message += "\n--Variable: %s\n--Current value: %s\n--Error: %s\n" % (cv, confvar, error)

        if error_message:
            sys.stderr.write("Possible missconfigurations: \n %s \n" % error_message)
            sys.exit(1)
        else:
            sys.stdout.write("Smoke test passed \n")
            sys.exit(0)
Example 6
def extract_archive_in_hdfs(request, upload_path, file_name):
  _upload_extract_archive_script_to_hdfs(request.fs)

  output_path = upload_path + '/' + file_name.split('.')[0]

  shell_notebook = Notebook(
      description=_('HDFS Extraction of %(upload_path)s/%(file_name)s') % {'upload_path': upload_path, 'file_name': file_name},
      isManaged=True,
      onSuccessUrl=reverse('filebrowser.views.view', kwargs={'path': output_path})
  )

  shell_notebook.add_shell_snippet(
      shell_command='extract_archive_in_hdfs.sh',
      arguments=[{'value': '-u=' + upload_path}, {'value': '-f=' + file_name}, {'value': '-o=' + output_path}],
      archives=[],
      files=[{'value': '/user/' + DEFAULT_USER.get() + '/common/extract_archive_in_hdfs.sh'}, {"value": upload_path + '/' + urllib.quote(file_name)}],
      env_var=[{'value': 'HADOOP_USER_NAME=${wf:user()}'}]
  )

  return shell_notebook.execute(request, batch=True)
Example 7
def compress_files_in_hdfs(request, file_names, upload_path, archive_name):

    _upload_compress_files_script_to_hdfs(request.fs)

    files = [{
        "value":
        upload_path + '/' +
        urllib_quote(file_name.encode('utf-8'), SAFE_CHARACTERS_URI)
    } for file_name in file_names]
    files.append({
        'value':
        '/user/' + DEFAULT_USER.get() + '/common/compress_files_in_hdfs.sh'
    })
    start_time = json.loads(request.POST.get('start_time', '-1'))

    shell_notebook = Notebook(
        name=_('HDFS Compression to %(upload_path)s/hue_compressed.zip') %
        {'upload_path': upload_path},
        isManaged=True,
        onSuccessUrl='/filebrowser/view=' + urllib_quote(
            upload_path.encode('utf-8'), safe=SAFE_CHARACTERS_URI_COMPONENTS))

    shell_notebook.add_shell_snippet(shell_command='compress_files_in_hdfs.sh',
                                     arguments=[{
                                         'value': '-u=' + upload_path
                                     }, {
                                         'value':
                                         '-f=' + ','.join(file_names)
                                     }, {
                                         'value': '-n=' + archive_name
                                     }],
                                     archives=[],
                                     files=files,
                                     env_var=[{
                                         'value':
                                         'HADOOP_USER_NAME=${wf:user()}'
                                     }],
                                     last_executed=start_time)

    return shell_notebook.execute(request, batch=True)
Example 8
def extract_archive_in_hdfs(request, upload_path, file_name):
  _upload_extract_archive_script_to_hdfs(request.fs)

  output_path = upload_path + '/' + file_name.split('.')[0]
  start_time = json.loads(request.POST.get('start_time', '-1'))

  shell_notebook = Notebook(
      name=_('HDFS Extraction of %(upload_path)s/%(file_name)s') % {'upload_path': upload_path, 'file_name': file_name},
      isManaged=True,
      onSuccessUrl=reverse('filebrowser.views.view', kwargs={'path': output_path})
  )

  shell_notebook.add_shell_snippet(
      shell_command='extract_archive_in_hdfs.sh',
      arguments=[{'value': '-u=' + upload_path}, {'value': '-f=' + file_name}, {'value': '-o=' + output_path}],
      archives=[],
      files=[{'value': '/user/' + DEFAULT_USER.get() + '/common/extract_archive_in_hdfs.sh'}, {"value": upload_path + '/' + urllib.quote(file_name)}],
      env_var=[{'value': 'HADOOP_USER_NAME=${wf:user()}'}],
      last_executed=start_time
  )

  return shell_notebook.execute(request, batch=True)
Example 9
def extract_archive_in_hdfs(request, upload_path, file_name):
  _upload_extract_archive_script_to_hdfs(request.fs)

  output_path = upload_path + '/' + file_name.split('.')[0]
  start_time = json.loads(request.POST.get('start_time', '-1'))

  shell_notebook = Notebook(
      name=_('HDFS Extraction of %(upload_path)s/%(file_name)s') % {'upload_path': upload_path, 'file_name': file_name},
      isManaged=True,
      onSuccessUrl='/filebrowser/view=' + urllib.parse.quote(output_path.encode('utf-8'), safe=SAFE_CHARACTERS_URI_COMPONENTS)
  )

  shell_notebook.add_shell_snippet(
      shell_command='extract_archive_in_hdfs.sh',
      arguments=[{'value': '-u=' + upload_path}, {'value': '-f=' + file_name}, {'value': '-o=' + output_path}],
      archives=[],
      files=[{'value': '/user/' + DEFAULT_USER.get() + '/common/extract_archive_in_hdfs.sh'}, {"value": upload_path + '/' + urllib.parse.quote(file_name)}],
      env_var=[{'value': 'HADOOP_USER_NAME=${wf:user()}'}],
      last_executed=start_time
  )

  return shell_notebook.execute(request, batch=True)
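
Examples 4, 6 and 8 quote the file name with the Python 2 urllib.quote, while Example 9 uses the Python 3 urllib.parse.quote, and Example 7 imports the function as urllib_quote. Code that has to run on both interpreters usually goes through a small compatibility shim along these lines (a sketch, not the project's actual import helper):

try:
    from urllib.parse import quote as urllib_quote  # Python 3
except ImportError:
    from urllib import quote as urllib_quote        # Python 2

# Percent-encode the path portion, keeping '/' usable as a separator.
encoded = urllib_quote('logs/app log.tar.gz'.encode('utf-8'), safe='/')
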
Example 10
def extract_archive_in_hdfs(request, upload_path, file_name):

    _upload_extract_archive_script_to_hdfs(request.fs)

    shell_notebook = Notebook()
    shell_notebook.add_shell_snippet(
        shell_command='extract_archive_in_hdfs.sh',
        arguments=[{
            'value': '-u=' + upload_path
        }, {
            'value': '-f=' + file_name
        }],
        archives=[],
        files=[{
            'value':
            '/user/' + DEFAULT_USER.get() +
            '/common/extract_archive_in_hdfs.sh'
        }, {
            "value": upload_path + '/' + file_name
        }],
        env_var=[{
            'value': 'HADOOP_USER_NAME=${wf:user()}'
        }])
    return shell_notebook.execute(request, batch=True)
Example 11
def compress_files_in_hdfs(request, file_names, upload_path, archive_name):

  _upload_compress_files_script_to_hdfs(request.fs)

  files = [{"value": upload_path + '/' + file_name} for file_name in file_names]
  files.append({'value': '/user/' + DEFAULT_USER.get() + '/common/compress_files_in_hdfs.sh'})
  start_time = json.loads(request.POST.get('start_time', '-1'))

  shell_notebook = Notebook(
    name=_('HDFS Compression to %(upload_path)s/hue_compressed.zip') % {'upload_path': upload_path},
    isManaged=True,
    onSuccessUrl=reverse('filebrowser.views.view', kwargs={'path': upload_path})
  )

  shell_notebook.add_shell_snippet(
      shell_command='compress_files_in_hdfs.sh',
      arguments=[{'value': '-u=' + upload_path}, {'value': '-f=' + ','.join(file_names)}, {'value': '-n=' + archive_name}],
      archives=[],
      files=files,
      env_var=[{'value': 'HADOOP_USER_NAME=${wf:user()}'}],
      last_executed=start_time
  )

  return shell_notebook.execute(request, batch=True)
Example 12
def compress_files_in_hdfs(request, file_names, upload_path):

  _upload_compress_files_script_to_hdfs(request.fs)

  output_path = upload_path

  files = [{"value": upload_path + '/' + file_name} for file_name in file_names]
  files.append({'value': '/user/' + DEFAULT_USER.get() + '/common/compress_files_in_hdfs.sh'})

  shell_notebook = Notebook(
    description=_('HDFS Compression to %(upload_path)s/hue_compressed.zip') % {'upload_path': upload_path},
    isManaged=True,
    onSuccessUrl=reverse('filebrowser.views.view', kwargs={'path': output_path})
  )

  shell_notebook.add_shell_snippet(
      shell_command='compress_files_in_hdfs.sh',
      arguments=[{'value': '-u=' + upload_path}, {'value': '-f=' + ','.join(file_names)}, {'value': '-o=' + output_path}],
      archives=[],
      files=files,
      env_var=[{'value': 'HADOOP_USER_NAME=${wf:user()}'}]
  )

  return shell_notebook.execute(request, batch=True)
Example 13
  TExecuteStatementReq, TGetOperationStatusReq, TFetchOrientation,\
  TCloseSessionReq, TGetSchemasReq, TGetLogReq, TCancelOperationReq,\
  TCloseOperationReq, TFetchResultsResp, TRowSet, TProtocolVersion

from beeswax import conf as beeswax_conf
from beeswax import hive_site
from beeswax.hive_site import hiveserver2_use_ssl
from beeswax.models import Session, HiveServerQueryHandle, HiveServerQueryHistory
from beeswax.server.dbms import Table, NoSuchObjectException, DataTable,\
                                QueryServerException


LOG = logging.getLogger(__name__)

IMPALA_RESULTSET_CACHE_SIZE = 'impala.resultset.cache.size'
DEFAULT_USER = DEFAULT_USER.get()


class HiveServerTable(Table):
  """
  We get the table details from a DESCRIBE FORMATTED.
  """

  def __init__(self, table_results, table_schema, desc_results, desc_schema):
    if beeswax_conf.THRIFT_VERSION.get() >= 7:
      if not table_results.columns:
        raise NoSuchObjectException()
      self.table = table_results.columns
    else: # Deprecated. To remove in Hue 4.
      if not table_results.rows:
        raise NoSuchObjectException()
Example 14
import logging
import posixpath

from desktop.conf import TIME_ZONE
from desktop.conf import DEFAULT_USER
from desktop.lib.rest.http_client import HttpClient
from desktop.lib.rest.resource import Resource

from liboozie.conf import SECURITY_ENABLED, OOZIE_URL, SSL_CERT_CA_VERIFY
from liboozie.types import WorkflowList, CoordinatorList, Coordinator, Workflow,\
  CoordinatorAction, WorkflowAction, BundleList, Bundle, BundleAction
from liboozie.utils import config_gen

LOG = logging.getLogger(__name__)
DEFAULT_USER = DEFAULT_USER.get()
API_VERSION = 'v1'  # Overridden to v2 for SLA

_XML_CONTENT_TYPE = 'application/xml;charset=UTF-8'


def get_oozie(user, api_version=API_VERSION):
    oozie_url = OOZIE_URL.get()
    secure = SECURITY_ENABLED.get()
    ssl_cert_ca_verify = SSL_CERT_CA_VERIFY.get()

    return OozieApi(oozie_url,
                    user,
                    security_enabled=secure,
                    api_version=api_version,
                    ssl_cert_ca_verify=ssl_cert_ca_verify)
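
In all of these examples DEFAULT_USER is not a plain string but a configuration property that is read with .get(); modules that need the value many times (Examples 13 and 14) snapshot it once at import time. A simplified stand-in for such a lazily resolved setting, assuming an environment-backed lookup rather than Hue's real desktop.lib.conf machinery:

import os

class Config(object):
    """Minimal config property: a key plus a default, resolved on each .get()."""

    def __init__(self, key, default):
        self.key = key
        self.default = default

    def get(self):
        # Read at call time so overrides are picked up; the real implementation
        # reads the merged configuration (hue.ini) rather than the environment.
        return os.environ.get(self.key, self.default)

DEFAULT_USER = Config('HUE_DEFAULT_USER', 'hue')  # hypothetical key; 'hue' is the usual default

DEFAULT_USER_NAME = DEFAULT_USER.get()  # snapshot the value once, as Examples 13 and 14 do
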
Example 15
def _upload_compress_files_script_to_hdfs(fs):
  if not fs.exists('/user/' + DEFAULT_USER.get() + '/common/'):
    fs.do_as_user(DEFAULT_USER.get(), fs.mkdir, '/user/' + DEFAULT_USER.get() + '/common/')
    fs.do_as_user(DEFAULT_USER.get(), fs.chmod, '/user/' + DEFAULT_USER.get() + '/common/', 0755)

  if not fs.do_as_user(DEFAULT_USER.get(), fs.exists, '/user/' + DEFAULT_USER.get() + '/common/compress_files_in_hdfs.sh'):
    fs.do_as_user(DEFAULT_USER.get(), fs.copyFromLocal, get_desktop_root() + '/core/src/desktop/lib/tasks/compress_files/compress_in_hdfs.sh',
                          '/user/' + DEFAULT_USER.get() + '/common/compress_files_in_hdfs.sh')
    fs.do_as_user(DEFAULT_USER.get(), fs.chmod, '/user/' + DEFAULT_USER.get() + '/common/', 0755)
Example 16
 def username(self):
   try:
     return self._thread_local.user
   except AttributeError:
     return DEFAULT_USER.get()
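
Examples 16 and 17 fall back to DEFAULT_USER when no per-request user has been recorded on the client's thread-local state. A minimal sketch of that pattern with threading.local (the setuser method is illustrative; the snippets here only show the reading side):

import threading

DEFAULT_USER_NAME = 'hue'  # stands in for DEFAULT_USER.get()

class ThreadLocalClient(object):
    def __init__(self):
        self._thread_local = threading.local()

    def setuser(self, user):
        # Remember the caller for the current thread only.
        self._thread_local.user = user

    @property
    def username(self):
        try:
            return self._thread_local.user
        except AttributeError:
            # Nothing was set on this thread: act as the default user.
            return DEFAULT_USER_NAME

client = ThreadLocalClient()
print(client.username)    # 'hue'
client.setuser('alice')
print(client.username)    # 'alice'
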
Example 17
 def username(self):
     try:
         return self._thread_local.user
     except AttributeError:
         return DEFAULT_USER.get()
Example 18
def _upload_extract_archive_script_to_hdfs(fs):
    if not fs.exists('/user/' + DEFAULT_USER.get() + '/common/'):
        fs.do_as_user(DEFAULT_USER.get(), fs.mkdir,
                      '/user/' + DEFAULT_USER.get() + '/common/')
        fs.do_as_user(DEFAULT_USER.get(), fs.chmod,
                      '/user/' + DEFAULT_USER.get() + '/common/', 0o755)

    if not fs.do_as_user(
            DEFAULT_USER.get(), fs.exists, '/user/' + DEFAULT_USER.get() +
            '/common/extract_archive_in_hdfs.sh'):
        fs.do_as_user(
            DEFAULT_USER.get(), fs.copyFromLocal,
            get_desktop_root() +
            '/core/src/desktop/lib/tasks/extract_archive/extract_in_hdfs.sh',
            '/user/' + DEFAULT_USER.get() +
            '/common/extract_archive_in_hdfs.sh')
        fs.do_as_user(DEFAULT_USER.get(), fs.chmod,
                      '/user/' + DEFAULT_USER.get() + '/common/', 0o755)
Example 19
def _upload_extract_archive_script_to_hdfs(fs):
  if not fs.exists('/user/' + DEFAULT_USER.get() + '/common/'):
    fs.do_as_user(DEFAULT_USER.get(), fs.mkdir, '/user/' + DEFAULT_USER.get() + '/common/')
    fs.do_as_user(DEFAULT_USER.get(), fs.chmod, '/user/' + DEFAULT_USER.get() + '/common/', 0755)

  if not fs.do_as_user(DEFAULT_USER.get(), fs.exists, '/user/' + DEFAULT_USER.get() + '/common/extract_archive_in_hdfs.sh'):
    fs.do_as_user(DEFAULT_USER.get(), fs.copyFromLocal, get_desktop_root() + '/core/src/desktop/lib/tasks/extract_archive/extract_in_hdfs.sh',
                          '/user/' + DEFAULT_USER.get() + '/common/extract_archive_in_hdfs.sh')
    fs.do_as_user(DEFAULT_USER.get(), fs.chmod, '/user/' + DEFAULT_USER.get() + '/common/', 0755)