Example #1
def extract_archive_in_hdfs(request, upload_path, file_name):
  # Make sure the helper shell script is present in HDFS before referencing it.
  _upload_extract_archive_script_to_hdfs(request.fs)

  # Wrap a single shell snippet in a throwaway Notebook and submit it as a batch job.
  shell_notebook = Notebook()
  shell_notebook.add_shell_snippet(
      shell_command='extract_archive_in_hdfs.sh',
      arguments=[{'value': '-u=' + upload_path}, {'value': '-f=' + file_name}],
      archives=[],
      # Localize both the script and the archive to extract alongside the job.
      files=[{'value': '/user/' + DEFAULT_USER.get() + '/common/extract_archive_in_hdfs.sh'}, {'value': upload_path + '/' + file_name}],
      # Run the underlying shell action as the submitting workflow user.
      env_var=[{'value': 'HADOOP_USER_NAME=${wf:user()}'}])
  return shell_notebook.execute(request, batch=True)
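
The snippets on this page come from Hue's file browser helpers and rely on several module-level names (Notebook, DEFAULT_USER, reverse, _, json, urllib). A minimal sketch of the imports they appear to assume follows; the module paths are best-effort assumptions based on the Hue project layout and may differ between Hue versions.

import json
import urllib  # Python 2 style: urllib.quote; Python 3 would use urllib.parse.quote

from django.core.urlresolvers import reverse        # newer Django: from django.urls import reverse
from django.utils.translation import ugettext as _  # i18n helper bound to the _ alias

# Assumed Hue-internal locations; verify against your Hue version:
from desktop.conf import DEFAULT_USER   # a Config object, hence DEFAULT_USER.get()
from notebook.models import Notebook    # builds snippets and submits them for execution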
Example #2
def extract_archive_in_hdfs(request, upload_path, file_name):
  _upload_extract_archive_script_to_hdfs(request.fs)

  output_path = upload_path + '/' + file_name.split('.')[0]  # 'data.tar.gz' -> '<upload_path>/data'
  start_time = json.loads(request.POST.get('start_time', '-1'))

  shell_notebook = Notebook(
      name=_('HDFS Extraction of %(upload_path)s/%(file_name)s') % {'upload_path': upload_path, 'file_name': file_name},
      isManaged=True,
      onSuccessUrl=reverse('filebrowser.views.view', kwargs={'path': output_path})
  )

  shell_notebook.add_shell_snippet(
      shell_command='extract_archive_in_hdfs.sh',
      arguments=[{'value': '-u=' + upload_path}, {'value': '-f=' + file_name}, {'value': '-o=' + output_path}],
      archives=[],
      files=[{'value': '/user/' + DEFAULT_USER.get() + '/common/extract_archive_in_hdfs.sh'}, {'value': upload_path + '/' + urllib.quote(file_name)}],
      env_var=[{'value': 'HADOOP_USER_NAME=${wf:user()}'}],
      last_executed=start_time
  )

  return shell_notebook.execute(request, batch=True)
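
For context, a hypothetical caller for this helper might look like the sketch below. The view name, parameter names, and JsonResponse wiring are illustrative assumptions, not Hue's actual handler.

from django.http import JsonResponse

def extract_uploaded_archive(request):  # hypothetical view, not part of Hue
    upload_path = request.POST.get('upload_path')
    file_name = request.POST.get('file_name')
    # Assumes execute() returns a JSON-serializable dict describing the submitted batch job.
    return JsonResponse(extract_archive_in_hdfs(request, upload_path, file_name))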
Example #3
def extract_archive_in_hdfs(request, upload_path, file_name):
    _upload_extract_archive_script_to_hdfs(request.fs)

    output_path = upload_path + '/' + file_name.split('.')[0]
    start_time = json.loads(request.POST.get('start_time', '-1'))

    shell_notebook = Notebook(
        name=_('HDFS Extraction of %(upload_path)s/%(file_name)s') % {'upload_path': upload_path, 'file_name': file_name},
        isManaged=True,
        onSuccessUrl=reverse('filebrowser.views.view', kwargs={'path': output_path})
    )

    shell_notebook.add_shell_snippet(
        shell_command='extract_archive_in_hdfs.sh',
        arguments=[{'value': '-u=' + upload_path}, {'value': '-f=' + file_name}, {'value': '-o=' + output_path}],
        archives=[],
        files=[{'value': '/user/' + DEFAULT_USER.get() + '/common/extract_archive_in_hdfs.sh'}, {'value': upload_path + '/' + urllib.quote(file_name)}],
        env_var=[{'value': 'HADOOP_USER_NAME=${wf:user()}'}],
        last_executed=start_time
    )

    return shell_notebook.execute(request, batch=True)
Example #4
def extract_archive_in_hdfs(request, upload_path, file_name):
    _upload_extract_archive_script_to_hdfs(request.fs)

    shell_notebook = Notebook()
    shell_notebook.add_shell_snippet(
        shell_command='extract_archive_in_hdfs.sh',
        arguments=[{'value': '-u=' + upload_path}, {'value': '-f=' + file_name}],
        archives=[],
        files=[{'value': '/user/' + DEFAULT_USER.get() + '/common/extract_archive_in_hdfs.sh'}, {'value': upload_path + '/' + file_name}],
        env_var=[{'value': 'HADOOP_USER_NAME=${wf:user()}'}])
    return shell_notebook.execute(request, batch=True)
Example #5
def compress_files_in_hdfs(request, file_names, upload_path, archive_name):
  # Make sure the helper shell script is present in HDFS before referencing it.
  _upload_compress_files_script_to_hdfs(request.fs)

  # Localize every file to compress, plus the script itself.
  files = [{'value': upload_path + '/' + file_name} for file_name in file_names]
  files.append({'value': '/user/' + DEFAULT_USER.get() + '/common/compress_files_in_hdfs.sh'})
  start_time = json.loads(request.POST.get('start_time', '-1'))

  shell_notebook = Notebook(
    name=_('HDFS Compression to %(upload_path)s/hue_compressed.zip') % {'upload_path': upload_path},
    isManaged=True,
    onSuccessUrl=reverse('filebrowser.views.view', kwargs={'path': upload_path})
  )

  shell_notebook.add_shell_snippet(
      shell_command='compress_files_in_hdfs.sh',
      arguments=[{'value': '-u=' + upload_path}, {'value': '-f=' + ','.join(file_names)}, {'value': '-n=' + archive_name}],
      archives=[],
      files=files,
      env_var=[{'value': 'HADOOP_USER_NAME=${wf:user()}'}],
      last_executed=start_time
  )

  return shell_notebook.execute(request, batch=True)
Example #6
def compress_files_in_hdfs(request, file_names, upload_path, archive_name):

  _upload_compress_files_script_to_hdfs(request.fs)

  files = [{"value": upload_path + '/' + file_name} for file_name in file_names]
  files.append({'value': '/user/' + DEFAULT_USER.get() + '/common/compress_files_in_hdfs.sh'})
  start_time = json.loads(request.POST.get('start_time', '-1'))

  shell_notebook = Notebook(
    name=_('HDFS Compression to %(upload_path)s/hue_compressed.zip') % {'upload_path': upload_path},
    isManaged=True,
    onSuccessUrl='/filebrowser/view=' + urllib.quote(upload_path.encode('utf-8'), safe=SAFE_CHARACTERS_URI_COMPONENTS)
  )

  shell_notebook.add_shell_snippet(
      shell_command='compress_files_in_hdfs.sh',
      arguments=[{'value': '-u=' + upload_path}, {'value': '-f=' + ','.join(file_names)}, {'value': '-n=' + archive_name}],
      archives=[],
      files=files,
      env_var=[{'value': 'HADOOP_USER_NAME=${wf:user()}'}],
      last_executed=start_time
  )

  return shell_notebook.execute(request, batch=True)
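
Unlike Example #5, this variant builds onSuccessUrl by hand and percent-encodes the path, since a raw HDFS path may contain characters that are not URL-safe. SAFE_CHARACTERS_URI_COMPONENTS is assumed to be a Hue constant listing characters to leave unescaped; the sketch below uses safe='/' to the same effect.

# Percent-encoding a path before embedding it in a URL (Python 2 urllib):
path = u'/user/demo/my folder'
url = '/filebrowser/view=' + urllib.quote(path.encode('utf-8'), safe='/')
# url == '/filebrowser/view=/user/demo/my%20folder'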
Example #7
def compress_files_in_hdfs(request, file_names, upload_path):

  _upload_compress_files_script_to_hdfs(request.fs)

  output_path = upload_path

  files = [{"value": upload_path + '/' + file_name} for file_name in file_names]
  files.append({'value': '/user/' + DEFAULT_USER.get() + '/common/compress_files_in_hdfs.sh'})

  shell_notebook = Notebook(
    description=_('HDFS Compression to %(upload_path)s/hue_compressed.zip') % {'upload_path': upload_path},
    isManaged=True,
    onSuccessUrl=reverse('filebrowser.views.view', kwargs={'path': output_path})
  )

  shell_notebook.add_shell_snippet(
      shell_command='compress_files_in_hdfs.sh',
      arguments=[{'value': '-u=' + upload_path}, {'value': '-f=' + ','.join(file_names)}, {'value': '-o=' + output_path}],
      archives=[],
      files=files,
      env_var=[{'value': 'HADOOP_USER_NAME=${wf:user()}'}]
  )

  return shell_notebook.execute(request, batch=True)
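
Across Examples #5-#7 the signature and flags vary (a '-n=' archive name in #5 and #6, a '-o=' output path and no archive_name parameter in #7), so a caller must match the variant in use. A hypothetical caller for the #5/#6 form, with illustrative names only:

from django.http import JsonResponse

def compress_selected_files(request):  # hypothetical view, not part of Hue
    upload_path = request.POST.get('upload_path')
    file_names = request.POST.getlist('file_name')  # multi-valued form field
    archive_name = request.POST.get('archive_name', 'hue_compressed.zip')
    return JsonResponse(compress_files_in_hdfs(request, file_names, upload_path, archive_name))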