def test_execute_and_wait(self):
  query = Notebook()

  query.execute = Mock(return_value={'history_uuid': 1, 'status': 0})
  query.check_status = Mock(side_effect=check_status_side_effect)
  request = Mock()

  resp = query.execute_and_wait(request=request)

  assert_equal(1, resp['history_uuid'])
  assert_equal(2, query.check_status.call_count)
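# Both test_execute_and_wait variants in this section drive Notebook.execute_and_wait()
# with a check_status_side_effect helper that is not defined here. A minimal sketch,
# assuming only the polling contract the assertions imply (two check_status calls:
# first still running, then available); the argument list, return shape and the
# counter reset are assumptions for illustration, not the actual helper:
def check_status_side_effect(*args, **kwargs):
  check_status_side_effect.counter += 1
  if check_status_side_effect.counter == 1:
    # First poll: the query is still running, so execute_and_wait keeps waiting.
    return {'status': 0, 'query_status': {'status': 'running'}}
  # Second poll: the query is available, so execute_and_wait returns.
  return {'status': 0, 'query_status': {'status': 'available'}}

check_status_side_effect.counter = 0  # in practice, reset in test setup so each test sees running -> available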
def extract_archive_in_hdfs(request, upload_path, file_name):
  _upload_extract_archive_script_to_hdfs(request.fs)

  shell_notebook = Notebook()
  shell_notebook.add_shell_snippet(
    shell_command='extract_archive_in_hdfs.sh',
    arguments=[{'value': '-u=' + upload_path}, {'value': '-f=' + file_name}],
    archives=[],
    files=[{'value': '/user/' + DEFAULT_USER.get() + '/common/extract_archive_in_hdfs.sh'}, {'value': upload_path + '/' + file_name}],
    env_var=[{'value': 'HADOOP_USER_NAME=${wf:user()}'}])

  return shell_notebook.execute(request, batch=True)
def test_execute_and_wait(self):
  query = Notebook()

  query.execute = Mock(return_value={'history_uuid': 1, 'status': 0})
  query.check_status = Mock(side_effect=check_status_side_effect)
  query.fetch_result_data = Mock(return_value={'results': [[1], [2]]})
  request = Mock()

  resp = query.execute_and_wait(request=request, include_results=True)

  assert_equal(0, resp.get('status'), resp)
  assert_equal('available', resp['query_status']['status'], resp)
  assert_equal([[1], [2]], resp.get('results'), resp)
  assert_equal(2, query.check_status.call_count)
def extract_archive_in_hdfs(request, upload_path, file_name):
  _upload_extract_archive_script_to_hdfs(request.fs)

  output_path = upload_path + '/' + file_name.split('.')[0]

  shell_notebook = Notebook(
    description=_('HDFS Extraction of %(upload_path)s/%(file_name)s') % {'upload_path': upload_path, 'file_name': file_name},
    isManaged=True,
    onSuccessUrl=reverse('filebrowser.views.view', kwargs={'path': output_path})
  )

  shell_notebook.add_shell_snippet(
    shell_command='extract_archive_in_hdfs.sh',
    arguments=[{'value': '-u=' + upload_path}, {'value': '-f=' + file_name}, {'value': '-o=' + output_path}],
    archives=[],
    files=[{'value': '/user/' + DEFAULT_USER.get() + '/common/extract_archive_in_hdfs.sh'}, {'value': upload_path + '/' + urllib.quote(file_name)}],
    env_var=[{'value': 'HADOOP_USER_NAME=${wf:user()}'}]
  )

  return shell_notebook.execute(request, batch=True)
def compress_files_in_hdfs(request, file_names, upload_path, archive_name):
  _upload_compress_files_script_to_hdfs(request.fs)

  files = [{'value': upload_path + '/' + urllib_quote(file_name.encode('utf-8'), SAFE_CHARACTERS_URI)} for file_name in file_names]
  files.append({'value': '/user/' + DEFAULT_USER.get() + '/common/compress_files_in_hdfs.sh'})

  start_time = json.loads(request.POST.get('start_time', '-1'))

  shell_notebook = Notebook(
    name=_('HDFS Compression to %(upload_path)s/hue_compressed.zip') % {'upload_path': upload_path},
    isManaged=True,
    onSuccessUrl='/filebrowser/view=' + urllib_quote(upload_path.encode('utf-8'), safe=SAFE_CHARACTERS_URI_COMPONENTS))

  shell_notebook.add_shell_snippet(
    shell_command='compress_files_in_hdfs.sh',
    arguments=[{'value': '-u=' + upload_path}, {'value': '-f=' + ','.join(file_names)}, {'value': '-n=' + archive_name}],
    archives=[],
    files=files,
    env_var=[{'value': 'HADOOP_USER_NAME=${wf:user()}'}],
    last_executed=start_time)

  return shell_notebook.execute(request, batch=True)
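# Every compress/extract variant first calls an _upload_*_script_to_hdfs helper that is
# not shown in this section. A minimal sketch, assuming all it needs to do is stage the
# shell script into the /user/<default user>/common/ directory that the `files` lists
# above point at; the local script location (LOCAL_SCRIPT_PATH) and the exact fs calls
# are assumptions for illustration, not the actual implementation:
def _upload_compress_files_script_to_hdfs(fs):
  common_dir = '/user/' + DEFAULT_USER.get() + '/common/'
  script_dst = common_dir + 'compress_files_in_hdfs.sh'

  if not fs.exists(common_dir):
    # Create the shared directory as the default user so the batch job can read from it.
    fs.do_as_user(DEFAULT_USER.get(), fs.mkdir, common_dir)

  if not fs.exists(script_dst):
    # LOCAL_SCRIPT_PATH is a hypothetical constant for the script shipped on local disk.
    fs.do_as_user(DEFAULT_USER.get(), fs.copyFromLocal, LOCAL_SCRIPT_PATH, script_dst)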
def extract_archive_in_hdfs(request, upload_path, file_name):
  _upload_extract_archive_script_to_hdfs(request.fs)

  output_path = upload_path + '/' + file_name.split('.')[0]
  start_time = json.loads(request.POST.get('start_time', '-1'))

  shell_notebook = Notebook(
    name=_('HDFS Extraction of %(upload_path)s/%(file_name)s') % {'upload_path': upload_path, 'file_name': file_name},
    isManaged=True,
    onSuccessUrl=reverse('filebrowser.views.view', kwargs={'path': output_path})
  )

  shell_notebook.add_shell_snippet(
    shell_command='extract_archive_in_hdfs.sh',
    arguments=[{'value': '-u=' + upload_path}, {'value': '-f=' + file_name}, {'value': '-o=' + output_path}],
    archives=[],
    files=[{'value': '/user/' + DEFAULT_USER.get() + '/common/extract_archive_in_hdfs.sh'}, {'value': upload_path + '/' + urllib.quote(file_name)}],
    env_var=[{'value': 'HADOOP_USER_NAME=${wf:user()}'}],
    last_executed=start_time
  )

  return shell_notebook.execute(request, batch=True)
def extract_archive_in_hdfs(request, upload_path, file_name):
  _upload_extract_archive_script_to_hdfs(request.fs)

  output_path = upload_path + '/' + file_name.split('.')[0]
  start_time = json.loads(request.POST.get('start_time', '-1'))

  shell_notebook = Notebook(
    name=_('HDFS Extraction of %(upload_path)s/%(file_name)s') % {'upload_path': upload_path, 'file_name': file_name},
    isManaged=True,
    onSuccessUrl='/filebrowser/view=' + urllib.parse.quote(output_path.encode('utf-8'), safe=SAFE_CHARACTERS_URI_COMPONENTS)
  )

  shell_notebook.add_shell_snippet(
    shell_command='extract_archive_in_hdfs.sh',
    arguments=[{'value': '-u=' + upload_path}, {'value': '-f=' + file_name}, {'value': '-o=' + output_path}],
    archives=[],
    files=[{'value': '/user/' + DEFAULT_USER.get() + '/common/extract_archive_in_hdfs.sh'}, {'value': upload_path + '/' + urllib.parse.quote(file_name)}],
    env_var=[{'value': 'HADOOP_USER_NAME=${wf:user()}'}],
    last_executed=start_time
  )

  return shell_notebook.execute(request, batch=True)
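# A hypothetical caller, sketched only for illustration: how a Django view might invoke
# the extract helper above. The view name, form field names and JsonResponse wrapping are
# assumptions, not part of this section; note the POST must carry a JSON 'start_time'
# value because the helper reads it with json.loads().
from django.http import JsonResponse
from django.views.decorators.http import require_POST

@require_POST
def extract_uploaded_archive(request):
  upload_path = request.POST.get('upload_path')
  file_name = request.POST.get('file_name')
  # Submits the shell snippet as a batch job and returns its submission response as JSON.
  return JsonResponse(extract_archive_in_hdfs(request, upload_path, file_name))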
def compress_files_in_hdfs(request, file_names, upload_path, archive_name):
  _upload_compress_files_script_to_hdfs(request.fs)

  files = [{'value': upload_path + '/' + file_name} for file_name in file_names]
  files.append({'value': '/user/' + DEFAULT_USER.get() + '/common/compress_files_in_hdfs.sh'})

  start_time = json.loads(request.POST.get('start_time', '-1'))

  shell_notebook = Notebook(
    name=_('HDFS Compression to %(upload_path)s/hue_compressed.zip') % {'upload_path': upload_path},
    isManaged=True,
    onSuccessUrl=reverse('filebrowser.views.view', kwargs={'path': upload_path})
  )

  shell_notebook.add_shell_snippet(
    shell_command='compress_files_in_hdfs.sh',
    arguments=[{'value': '-u=' + upload_path}, {'value': '-f=' + ','.join(file_names)}, {'value': '-n=' + archive_name}],
    archives=[],
    files=files,
    env_var=[{'value': 'HADOOP_USER_NAME=${wf:user()}'}],
    last_executed=start_time
  )

  return shell_notebook.execute(request, batch=True)
def compress_files_in_hdfs(request, file_names, upload_path):
  _upload_compress_files_script_to_hdfs(request.fs)

  output_path = upload_path

  files = [{'value': upload_path + '/' + file_name} for file_name in file_names]
  files.append({'value': '/user/' + DEFAULT_USER.get() + '/common/compress_files_in_hdfs.sh'})

  shell_notebook = Notebook(
    description=_('HDFS Compression to %(upload_path)s/hue_compressed.zip') % {'upload_path': upload_path},
    isManaged=True,
    onSuccessUrl=reverse('filebrowser.views.view', kwargs={'path': output_path})
  )

  shell_notebook.add_shell_snippet(
    shell_command='compress_files_in_hdfs.sh',
    arguments=[{'value': '-u=' + upload_path}, {'value': '-f=' + ','.join(file_names)}, {'value': '-o=' + output_path}],
    archives=[],
    files=files,
    env_var=[{'value': 'HADOOP_USER_NAME=${wf:user()}'}]
  )

  return shell_notebook.execute(request, batch=True)
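# The mocked Notebook.execute in the tests at the top of this section returns a dict of the
# form {'history_uuid': ..., 'status': 0}. Assuming the real batch execute() used by the
# helpers above returns the same shape (an assumption, not confirmed here), a hypothetical
# wrapper could surface the submitted job id for later polling via Notebook.check_status:
def submit_compression(request, file_names, upload_path):
  # Calls the three-argument compress variant directly above.
  resp = compress_files_in_hdfs(request, file_names, upload_path)
  if resp.get('status') != 0:
    raise RuntimeError('Compression job submission failed: %s' % resp)
  return resp.get('history_uuid')  # id of the submitted batch job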