def setup_class(cls):
    """Start the Sqoop test server and log in a non-superuser test client."""
    SqoopServerProvider.setup_class()

    cls.client = make_logged_in_client(username="******", is_superuser=False)
    cls.user = User.objects.get(username="******")

    # Group membership plus explicit app permission for the sqoop app.
    add_to_group("test")
    grant_access("test", "test", "sqoop")
def setup_class(cls):
    """Bring up Oozie, provision a test user with an HDFS home dir, and run the
    sleep example workflow once so tests have a finished Hadoop job to inspect."""
    OozieServerProvider.setup_class()

    cls.username = '******'
    cls.home_dir = '/user/%s' % cls.username
    cls.cluster.fs.do_as_user(cls.username, cls.cluster.fs.create_home_dir, cls.home_dir)

    cls.client = make_logged_in_client(username=cls.username, is_superuser=False, groupname='test')
    cls.user = User.objects.get(username=cls.username)
    grant_access(cls.username, 'test', 'jobsub')
    grant_access(cls.username, 'test', 'jobbrowser')
    grant_access(cls.username, 'test', 'oozie')
    add_to_group(cls.username)

    # Impersonate the test user on HDFS; keep the previous user for teardown.
    cls.prev_user = cls.cluster.fs.user
    cls.cluster.fs.setuser(cls.username)

    cls.install_examples()
    cls.design = cls.create_design()

    # Run the sleep example, since it doesn't require user home directory
    workflow_design_id = cls.design.id
    submit_response = cls.client.post(
        reverse('oozie:submit_workflow', args=[workflow_design_id]),
        data={
            u'form-MAX_NUM_FORMS': [u''],
            u'form-INITIAL_FORMS': [u'1'],
            u'form-0-name': [u'REDUCER_SLEEP_TIME'],
            u'form-0-value': [u'1'],
            u'form-TOTAL_FORMS': [u'1'],
        },
        follow=True)
    submitted_job_id = submit_response.context['oozie_workflow'].id
    OozieServerProvider.wait_until_completion(submitted_job_id)

    cls.hadoop_job_id = get_hadoop_job_id(cls.oozie, submitted_job_id, 1)
    cls.hadoop_job_id_short = views.get_shorter_id(cls.hadoop_job_id)
def setup_class(cls):
    """Skip unless on a live cluster; create a libzookeeper client and a local
    temp-directory fixture (subdir + one known-content file)."""
    if not is_live_cluster():
        raise SkipTest()

    cls.client = make_logged_in_client(username="******", is_superuser=False)
    cls.user = User.objects.get(username="******")
    add_to_group("test")
    grant_access("test", "test", "libzookeeper")

    # Create a ZKNode namespace
    cls.namespace = "TestWithZooKeeper"

    # Create temporary test directory and file with contents
    cls.local_directory = tempfile.mkdtemp()

    # Create subdirectory
    cls.subdir_name = "subdir"
    subdir_path = "%s/%s" % (cls.local_directory, cls.subdir_name)
    # 0o755 is the Py2.6+/Py3-compatible octal literal (was the Py2-only 0755).
    os.mkdir(subdir_path, 0o755)

    # Create file: 'with' guarantees the handle is closed and avoids shadowing
    # the 'file' builtin (the original left an open handle named 'file').
    cls.filename = "test.txt"
    file_path = "%s/%s" % (subdir_path, cls.filename)
    cls.file_contents = "This is a test"
    with open(file_path, "w+") as f:
        f.write(cls.file_contents)
def setup_class(cls):
    """Skip unless on a live cluster; create a libzookeeper client and a local
    temp-directory fixture (subdir + one known-content file)."""
    if not is_live_cluster():
        raise SkipTest()

    cls.client = make_logged_in_client(username='******', is_superuser=False)
    cls.user = User.objects.get(username='******')
    add_to_group('test')
    grant_access("test", "test", "libzookeeper")

    # Create a ZKNode namespace
    cls.namespace = 'TestWithZooKeeper'

    # Create temporary test directory and file with contents
    cls.local_directory = tempfile.mkdtemp()

    # Create subdirectory
    cls.subdir_name = 'subdir'
    subdir_path = '%s/%s' % (cls.local_directory, cls.subdir_name)
    # 0o755 is the Py2.6+/Py3-compatible octal literal (was the Py2-only 0755).
    os.mkdir(subdir_path, 0o755)

    # Create file: 'with' guarantees the handle is closed and avoids shadowing
    # the 'file' builtin (the original left an open handle named 'file').
    cls.filename = 'test.txt'
    file_path = '%s/%s' % (subdir_path, cls.filename)
    cls.file_contents = "This is a test"
    with open(file_path, 'w+') as f:
        f.write(cls.file_contents)
def setup_class(cls):
    """Create a non-admin client with metadata and optimizer access."""
    cls.client = make_logged_in_client(username='******', is_superuser=False)
    cls.user = rewrite_user(User.objects.get(username='******'))

    add_to_group('test')
    grant_access("test", "test", "metadata")
    grant_access("test", "test", "optimizer")
def setup_class(cls):
    """Start the Sqoop test server and log in a non-superuser test client."""
    SqoopServerProvider.setup_class()

    cls.client = make_logged_in_client(username='******', is_superuser=False)
    cls.user = User.objects.get(username='******')

    # Group membership plus explicit app permission for the sqoop app.
    add_to_group('test')
    grant_access("test", "test", "sqoop")
def setup_class(cls):
    """On a live cluster, recreate a dedicated Impala database and load a small
    'tweets' table used as the shared fixture for these tests."""
    cls.finish = []

    if not is_live_cluster():
        raise SkipTest

    cls.client = make_logged_in_client()
    cls.user = User.objects.get(username='******')
    add_to_group('test')

    cls.db = dbms.get(cls.user, get_query_server_config(name='impala'))
    cls.DATABASE = get_db_prefix(name='impala')

    # Tear down any leftovers, then recreate the database from scratch.
    schema_statements = [
        'DROP TABLE IF EXISTS %(db)s.tweets;' % {'db': cls.DATABASE},
        'DROP DATABASE IF EXISTS %(db)s CASCADE;' % {'db': cls.DATABASE},
        'CREATE DATABASE %(db)s;' % {'db': cls.DATABASE},
    ]
    for statement in schema_statements:
        response = _make_query(cls.client, statement, database='default', local=False, server_name='impala')
        response = wait_for_query_to_finish(cls.client, response, max=180.0)
        payload = json.loads(response.content)
        assert_true(payload['status'] == 0, response.content)

    # Populate the fixture table row by row.
    data_statements = [
        """ CREATE TABLE tweets (row_num INTEGER, id_str STRING, text STRING) STORED AS PARQUET; """,
        """ INSERT INTO TABLE tweets VALUES (1, "531091827395682000", "My dad looks younger than costa"); """,
        """ INSERT INTO TABLE tweets VALUES (2, "531091827781550000", "There is a thin line between your partner being vengeful and you reaping the consequences of your bad actions towards your partner."); """,
        """ INSERT INTO TABLE tweets VALUES (3, "531091827768979000", "@Mustang_Sally83 and they need to get into you :))))"); """,
        """ INSERT INTO TABLE tweets VALUES (4, "531091827114668000", "@RachelZJohnson thank you rach!xxx"); """,
        """ INSERT INTO TABLE tweets VALUES (5, "531091827949309000", "i think @WWERollins was robbed of the IC title match this week on RAW also i wonder if he will get a rematch i hope so @WWE"); """,
    ]
    for statement in data_statements:
        response = _make_query(cls.client, statement, database=cls.DATABASE, local=False, server_name='impala')
        response = wait_for_query_to_finish(cls.client, response, max=180.0)
        payload = json.loads(response.content)
        assert_true(payload['status'] == 0, response.content)
def test_login_does_not_reset_groups(self):
    """Login must add the default group only when the user has no groups, and
    must never clobber manually assigned group memberships."""
    client = make_logged_in_client(username=self.test_username, password="******")
    user = User.objects.get(username=self.test_username)
    test_group, _created = Group.objects.get_or_create(name=self.test_username)
    default_group = get_default_user_group()

    user.groups.all().delete()
    assert_false(user.groups.exists())

    # No groups
    response = client.post('/hue/accounts/login/', dict(username=self.test_username, password="******"), follow=True)
    assert_equal(200, response.status_code, "Expected ok status.")
    assert_equal([default_group.name], list(user.groups.values_list('name', flat=True)))

    add_to_group(self.test_username, self.test_username)

    # Two groups
    client.get('/accounts/logout')
    response = client.post('/hue/accounts/login/', dict(username=self.test_username, password="******"), follow=True)
    assert_equal(200, response.status_code, "Expected ok status.")
    assert_equal({default_group.name, test_group.name}, set(user.groups.values_list('name', flat=True)))

    user.groups.filter(name=default_group.name).delete()
    assert_equal({test_group.name}, set(user.groups.values_list('name', flat=True)))

    # Keep manual group only, don't re-add default group
    client.get('/accounts/logout')
    response = client.post('/hue/accounts/login/', dict(username=self.test_username, password="******"), follow=True)
    assert_equal(200, response.status_code, "Expected ok status.")
    assert_equal([test_group.name], list(user.groups.values_list('name', flat=True)))

    user.groups.remove(test_group)
    assert_false(user.groups.exists())

    # Re-add default group
    client.get('/accounts/logout')
    response = client.post('/hue/accounts/login/', dict(username=self.test_username, password="******"), follow=True)
    assert_equal(200, response.status_code, "Expected ok status.")
    assert_equal([default_group.name], list(user.groups.values_list('name', flat=True)))
def setUpClass(cls):
    """Initialize the S3 test base; when not skipping, build an S3 filesystem
    and a non-admin client with filebrowser access."""
    S3TestBase.setUpClass()

    if not cls.shouldSkip():
        cls.fs = S3FileSystem(cls.s3_connection)
        cls.c = make_logged_in_client(username='******', is_superuser=False)
        grant_access('test', 'test', 'filebrowser')
        add_to_group('test')
def setUp(self):
    """Create an indexer client backed by a mocked Solr CDH-cloud HDFS API."""
    self.c = make_logged_in_client(is_superuser=False)
    grant_access("test", "test", "indexer")
    add_to_group("test")

    self.user = User.objects.get(username='******')
    self.solr_client = SolrClient(self.user, api=MockSolrCdhCloudHdfsApi())

    # Force the new indexer on; the returned reset callable is kept for teardown.
    self.finish = ENABLE_NEW_INDEXER.set_for_testing(True)
def setUp(self):
    """Create a notebook client and an HS2 API bound to the test user."""
    self.client = make_logged_in_client(username="******", groupname="test", recreate=False, is_superuser=False)
    self.user = User.objects.get(username='******')

    add_to_group('test')
    grant_access("test", "test", "notebook")

    self.db = dbms.get(self.user, get_query_server_config())
    self.api = HS2Api(self.user)
def setup_class(cls):
    """Skip unless on a live cluster; create a non-admin indexer client."""
    if not is_live_cluster():
        raise SkipTest('These tests can only run on a live cluster')

    cls.client = make_logged_in_client(username='******', is_superuser=False)
    cls.user = User.objects.get(username='******')
    add_to_group('test')
    grant_access("test", "test", "indexer")
def setup_class(cls):
    """Skip unless on a live cluster; create a non-admin libzookeeper client."""
    if not is_live_cluster():
        raise SkipTest()

    cls.client = make_logged_in_client(username='******', is_superuser=False)
    cls.user = User.objects.get(username='******')
    add_to_group('test')
    grant_access("test", "test", "libzookeeper")
def setup_class(cls):
    """Create a metadata client; skip unless a live cluster with Navigator is
    available for this user."""
    cls.client = make_logged_in_client(username='******', is_superuser=False)
    cls.user = rewrite_user(User.objects.get(username='******'))

    add_to_group('test')
    grant_access("test", "test", "metadata")

    if not is_live_cluster() or not has_navigator(cls.user):
        raise SkipTest
def setup_class(cls):
    """Create a metadata client and a Navigator API wired to a mocked root."""
    cls.client = make_logged_in_client(username='******', is_superuser=False)
    cls.user = rewrite_user(User.objects.get(username='******'))

    add_to_group('test')
    grant_access("test", "test", "metadata")

    cls.api = NavigatorApi(cls.user)
    cls.api._root = MockedRoot()  # stub out the HTTP layer
def setup_class(cls):
    """Start a shared Beeswax server and initialize the test database."""
    cls.db_name = get_db_prefix(name='hive')
    cls.cluster, shutdown_fn = get_shared_beeswax_server(cls.db_name)

    cls.client = make_logged_in_client(username='******', is_superuser=False)
    add_to_group('test')
    grant_access("test", "test", "beeswax")

    # Weird redirection to avoid binding nonsense.
    cls.shutdown = [shutdown_fn]

    cls.init_beeswax_db()
def setup_class(cls):
    """Start a shared Beeswax server and initialize the test database."""
    cls.db_name = get_db_prefix(name="hive")
    cls.cluster, shutdown_fn = get_shared_beeswax_server(cls.db_name)

    cls.client = make_logged_in_client(username="******", is_superuser=False)
    add_to_group("test")
    grant_access("test", "test", "beeswax")

    # Weird redirection to avoid binding nonsense.
    cls.shutdown = [shutdown_fn]

    cls.init_beeswax_db()
def setUp(self):
    """Point the tests at a real Impala daemon (skipping otherwise) and load
    the 'tweets' fixture into a dedicated database."""
    self.finish = []

    # We need a real Impala cluster currently.
    # Hoisted: os.environ.get was called twice; 'not in' is the idiomatic test.
    impalad_host = os.environ.get('TEST_IMPALAD_HOST')
    if 'impala' not in sys.argv and not impalad_host:
        raise SkipTest
    if impalad_host:
        self.finish.append(SERVER_HOST.set_for_testing(impalad_host))

    self.client = make_logged_in_client()
    self.user = User.objects.get(username='******')
    add_to_group('test')
    self.db = dbms.get(self.user, get_query_server_config(name='impala'))

    hql = """ USE default; DROP TABLE IF EXISTS %(db)s.tweets; DROP DATABASE IF EXISTS %(db)s; CREATE DATABASE %(db)s; USE %(db)s; """ % {'db': self.DATABASE}
    resp = _make_query(self.client, hql, database='default', local=False, server_name='impala')
    resp = wait_for_query_to_finish(self.client, resp, max=30.0)

    hql = """ CREATE TABLE tweets (row_num INTEGER, id_str STRING, text STRING) STORED AS PARQUET; INSERT INTO TABLE tweets VALUES (1, "531091827395682000", "My dad looks younger than costa"); INSERT INTO TABLE tweets VALUES (2, "531091827781550000", "There is a thin line between your partner being vengeful and you reaping the consequences of your bad actions towards your partner."); INSERT INTO TABLE tweets VALUES (3, "531091827768979000", "@Mustang_Sally83 and they need to get into you :))))"); INSERT INTO TABLE tweets VALUES (4, "531091827114668000", "@RachelZJohnson thank you rach!xxx"); INSERT INTO TABLE tweets VALUES (5, "531091827949309000", "i think @WWERollins was robbed of the IC title match this week on RAW also i wonder if he will get a rematch i hope so @WWE"); """
    resp = _make_query(self.client, hql, database=self.DATABASE, local=False, server_name='impala')
    resp = wait_for_query_to_finish(self.client, resp, max=30.0)

def tearDown(self):
    """Undo every config override installed by setUp."""
    for f in self.finish:
        f()
def setUp(self):
    """Log in a test user and prepare an HDFS home dir for filebrowser tests."""
    self.c = make_logged_in_client(username='******', is_superuser=False)
    grant_access('test', 'test', 'filebrowser')
    add_to_group('test')
    self.user = User.objects.get(username='******')

    # Shared pseudo-HDFS cluster, acting as the test user.
    self.cluster = pseudo_hdfs4.shared_cluster()
    self.cluster.fs.setuser('test')
    self.prefix = self.cluster.fs_prefix + '/filebrowser'
    self.cluster.fs.do_as_user('test', self.cluster.fs.create_home_dir, '/user/test')
def setup_class(cls):
    """Start a shared Beeswax server; grant beeswax and metastore access and
    initialize the test database."""
    cls.db_name = get_db_prefix(name='hive')
    cls.cluster, shutdown_fn = get_shared_beeswax_server(cls.db_name)

    cls.client = make_logged_in_client(username='******', is_superuser=False)
    add_to_group('test', 'test')
    grant_access('test', 'test', 'beeswax')
    grant_access('test', 'test', 'metastore')

    # Weird redirection to avoid binding nonsense.
    cls.shutdown = [shutdown_fn]

    cls.init_beeswax_db()
def setup_class(cls):
    """Skip unless sentry-site.xml exists; create a libsentry client and
    remember the config file path."""
    if not os.path.exists(os.path.join(SENTRY_CONF_DIR.get(), 'sentry-site.xml')):
        raise SkipTest('Could not find sentry-site.xml, skipping sentry tests')

    cls.client = make_logged_in_client(username='******', is_superuser=False)
    cls.user = User.objects.get(username='******')
    add_to_group('test')
    grant_access("test", "test", "libsentry")

    cls.config_path = os.path.join(SENTRY_CONF_DIR.get(), 'sentry-site.xml')
def setup_class(cls):
    """Create a non-admin indexer client and declare the module-level Solr
    topology flags (assigned elsewhere in the original setup)."""
    cls.client = make_logged_in_client(username='******', is_superuser=False)
    cls.user = User.objects.get(username='******')
    add_to_group('test')
    grant_access("test", "test", "indexer")

    # NOTE(review): these globals are declared but not assigned here —
    # presumably the setup continues past this excerpt; verify in full source.
    global _IS_SOLR_CLOUD
    global _IS_SOLR_6_OR_MORE
    global _IS_SOLR_WITH_HDFS
    global _ZOOKEEPER_HOST
def setup_class(cls):
    """Skip unless on a live cluster; create a libsentry client and a Sentry
    service connection for the test user."""
    if not is_live_cluster():
        raise SkipTest()

    cls.client = make_logged_in_client(username='******', is_superuser=False)
    cls.user = User.objects.get(username='******')
    add_to_group('test')
    grant_access("test", "test", "libsentry")

    cls.db = SentryClient(HOSTNAME.get(), PORT.get(), 'test')
def setup_class(cls):
    """Skip unless a live cluster with Navigator is enabled; create a client
    with metadata/navigator access and a Navigator API handle."""
    if not is_live_cluster() or not is_navigator_enabled():
        raise SkipTest

    cls.client = make_logged_in_client(username='******', is_superuser=False)
    cls.user = User.objects.get(username='******')
    add_to_group('test')
    grant_access("test", "test", "metadata")
    grant_access("test", "test", "navigator")

    cls.api = NavigatorApi()
def setUp(self):
    """Install the Beeswax examples and prepare an HS2 API plus a canned
    sample_07 query used by the tests."""
    self.client.post('/beeswax/install_examples')

    self.user = User.objects.get(username='******')
    add_to_group('test')
    grant_access("test", "test", "notebook")

    self.db = dbms.get(self.user, get_query_server_config())
    self.cluster.fs.do_as_user('test', self.cluster.fs.create_home_dir, '/user/test')
    self.api = HS2Api(self.user)

    self.statement = 'SELECT description, salary FROM sample_07 WHERE (sample_07.salary > 100000) ORDER BY salary DESC LIMIT 1000'
def setup_class(cls):
    """Skip unless a live cluster with Navigator is enabled; create a client
    with metadata/navigator access and a Navigator API handle."""
    if not is_live_cluster() or not cls.is_navigator_enabled():
        raise SkipTest

    cls.client = make_logged_in_client(username='******', is_superuser=False)
    cls.user = User.objects.get(username='******')
    add_to_group('test')
    grant_access("test", "test", "metadata")
    grant_access("test", "test", "navigator")

    cls.api = NavigatorApi()
def setup_class(cls):
    """Skip unless the optimizer is enabled; create a client with
    metadata/optimizer access and an Optimizer API handle."""
    if not is_optimizer_enabled():
        raise SkipTest

    cls.client = make_logged_in_client(username='******', is_superuser=False)
    cls.user = User.objects.get(username='******')
    add_to_group('test')
    grant_access("test", "test", "metadata")
    grant_access("test", "test", "optimizer")

    cls.api = OptimizerApi()
def setup_class(cls):
    """Skip unless a live cluster with the optimizer enabled; create a client
    with metadata/optimizer access and an Optimizer API handle."""
    if not is_live_cluster() or not is_optimizer_enabled():
        raise SkipTest

    cls.client = make_logged_in_client(username='******', is_superuser=False)
    cls.user = rewrite_user(User.objects.get(username='******'))

    add_to_group('test')
    grant_access("test", "test", "metadata")
    grant_access("test", "test", "optimizer")

    cls.api = OptimizerApi()
def setUp(self):
    """Swap in a mocked Sentry API and create two clients; the suite is
    currently disabled (unconditional SkipTest at the end)."""
    # Stash the real factory once so repeated setUps keep a handle to it.
    if not hasattr(api, 'OriginalSentryApi'):
        api.OriginalSentryApi = api.get_api
    api.get_api = mocked_get_api

    self.client = make_logged_in_client(username='******', groupname='test', is_superuser=False)
    self.client_admin = make_logged_in_client(username='******', groupname='hue', is_superuser=False)
    grant_access("sentry_test", "test", "security")
    grant_access("sentry_hue", "hue", "security")
    add_to_group("sentry_test")
    add_to_group("sentry_hue")

    raise SkipTest
def setup_class(cls):
    """On a live cluster, create an indexer client and install the indexer
    example collections, asserting the install succeeded."""
    if not is_live_cluster():
        raise SkipTest()

    cls.client = make_logged_in_client(username='******', is_superuser=False)
    cls.user = User.objects.get(username='******')
    add_to_group('test')
    grant_access("test", "test", "indexer")

    install_response = cls.client.post(reverse('indexer:install_examples'))
    payload = json.loads(install_response.content)
    assert_equal(payload.get('status'), 0)
def setup_class(cls):
    """Skip unless on a live cluster with a sentry-site.xml; create a
    libsentry client and a Sentry service connection."""
    if not is_live_cluster():
        raise SkipTest('Sentry tests require a live sentry server')
    if not os.path.exists(os.path.join(SENTRY_CONF_DIR.get(), 'sentry-site.xml')):
        raise SkipTest('Could not find sentry-site.xml, skipping sentry tests')

    cls.client = make_logged_in_client(username='******', is_superuser=False)
    cls.user = User.objects.get(username='******')
    add_to_group('test')
    grant_access("test", "test", "libsentry")

    cls.db = SentryClient(HOSTNAME.get(), PORT.get(), 'test')
def setUp(self):
    """Build an ABFS client against the configured Azure account and create a
    unique test filesystem for this run."""
    if not is_abfs_enabled():
        raise SkipTest

    self.client = ABFS.from_config(
        ABFS_CLUSTERS['default'],
        ActiveDirectory.from_config(AZURE_ACCOUNTS['default'], version='v2.0'))

    self.c = make_logged_in_client(username='******', is_superuser=False)
    grant_access('test', 'test', 'filebrowser')
    add_to_group('test')
    self.user = User.objects.get(username="******")

    # Timestamp suffix keeps repeated/concurrent runs from colliding.
    self.test_fs = 'abfs://test' + str(int(time.time()))
    # Lazy %-args: let logging interpolate only when DEBUG is enabled
    # (original eagerly formatted with "%s" % ...).
    LOG.debug("%s", self.test_fs)
    self.client.mkdir(self.test_fs)
def setup_class(cls, load_data=True):
    """Start a shared Beeswax server, pick the execution engine, and
    initialize the test database (optionally loading data)."""
    cls.load_data = load_data

    cls.db_name = get_db_prefix(name='hive')
    cls.cluster, shutdown_fn = get_shared_beeswax_server(cls.db_name)
    cls.set_execution_engine()

    cls.client = make_logged_in_client(username='******', is_superuser=False)
    add_to_group('test', 'test')
    grant_access('test', 'test', 'beeswax')
    grant_access('test', 'test', 'metastore')

    # Weird redirection to avoid binding nonsense.
    cls.shutdown = [shutdown_fn]

    cls.init_beeswax_db()
def setUp(self):
    """
    To clean: creating test1, test2, test3...users
    """
    TestJobBrowserWithHadoop.user_count += 1
    self.username = '******' + str(TestJobBrowserWithHadoop.user_count)
    self.home_dir = '/user/%s' % self.username
    self.cluster.fs.do_as_user(self.username, self.cluster.fs.create_home_dir, self.home_dir)

    self.client = make_logged_in_client(username=self.username, is_superuser=False, groupname='test')
    self.user = User.objects.get(username=self.username)
    grant_access(self.username, 'test', 'jobsub')
    grant_access(self.username, 'test', 'jobbrowser')
    grant_access(self.username, 'test', 'oozie')
    add_to_group(self.username)

    self.prev_user = self.cluster.fs.user
    self.cluster.fs.setuser(self.username)

    self.install_examples()
    self.design = self.create_design()

    # NOTE(review): everything below this raise is unreachable — the test is
    # disabled here; the tail is kept for when it is re-enabled.
    raise SkipTest

    # Run the sleep example, since it doesn't require user home directory
    sleep_design_id = self.design.id
    submit_response = self.client.post(
        reverse('oozie:submit_workflow', args=[sleep_design_id]),
        data={
            u'form-MAX_NUM_FORMS': [u''],
            u'form-INITIAL_FORMS': [u'1'],
            u'form-0-name': [u'REDUCER_SLEEP_TIME'],
            u'form-0-value': [u'1'],
            u'form-TOTAL_FORMS': [u'1'],
        },
        follow=True)
    submitted_job_id = submit_response.context['oozie_workflow'].id
    OozieServerProvider.wait_until_completion(submitted_job_id, timeout=120, step=1)
    self.hadoop_job_id = get_hadoop_job_id(self.oozie, submitted_job_id, 1)
    self.hadoop_job_id_short = views.get_shorter_id(self.hadoop_job_id)
def setup_class(cls):
    """Skip unless on a live cluster with a sentry-site.xml; create a
    libsentry client and a Sentry service connection."""
    if not is_live_cluster():
        raise SkipTest('Sentry tests require a live sentry server')
    if not os.path.exists(os.path.join(SENTRY_CONF_DIR.get(), 'sentry-site.xml')):
        raise SkipTest('Could not find sentry-site.xml, skipping sentry tests')

    cls.client = make_logged_in_client(username='******', is_superuser=False)
    cls.user = User.objects.get(username='******')
    add_to_group('test')
    grant_access("test", "test", "libsentry")

    cls.db = SentryClient(HOSTNAME.get(), PORT.get(), 'test')
def setup_class(cls):
    """Skip without an optimizer; create the API client and upload fixtures
    once per test run (guarded by a class-level flag)."""
    if not has_optimizer():
        raise SkipTest

    cls.client = make_logged_in_client(username='******', is_superuser=False)
    cls.user = rewrite_user(User.objects.get(username='******'))

    add_to_group('test')
    grant_access("test", "test", "metadata")
    grant_access("test", "test", "optimizer")

    cls.api = OptimizerApi(user=cls.user)

    # Uploading is expensive, so share it across all subclasses.
    if not BaseTestOptimizerApi.UPLOADED:
        cls.upload()
        BaseTestOptimizerApi.UPLOADED = True
def test_get_design(self):
    """The owner can fetch a design; a different user gets a 500 with a
    permission-denied message."""
    design_url = reverse('jobsub:jobsub.views.get_design', kwargs={'design_id': self.design.id})

    response = self.client.get(design_url, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
    assert_equal(response.status_code, 200)

    other_client = make_logged_in_client(username='******', is_superuser=False)
    grant_access("jobsub_test_note_me", "jobsub_test_note_me", "jobsub")
    add_to_group("jobsub_test_note_me")

    response = other_client.get(design_url, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
    assert_equal(response.status_code, 500)
    data = json.loads(response.content)
    assert_true('does not have the permissions required to access document' in data.get('message', ''), response.content)
def test_get_design(self):
    """The owner can fetch a design; a different user gets a 500 with a
    permission-denied message."""
    design_url = reverse('jobsub.views.get_design', kwargs={'design_id': self.design.id})

    response = self.client.get(design_url, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
    assert_equal(response.status_code, 200)

    other_client = make_logged_in_client(username='******', is_superuser=False)
    grant_access("jobsub_test_note_me", "jobsub_test_note_me", "jobsub")
    add_to_group("jobsub_test_note_me")

    response = other_client.get(design_url, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
    assert_equal(response.status_code, 500)
    data = json.loads(response.content)
    assert_true('does not have the permissions required to access document' in data.get('message', ''), response.content)
def setup_class(cls):
    """On a live cluster, recreate a dedicated Impala database and load a small
    'tweets' table used as the shared fixture for these tests."""
    cls.finish = []

    if not is_live_cluster():
        raise SkipTest

    cls.client = make_logged_in_client()
    cls.user = User.objects.get(username='******')
    add_to_group('test')

    cls.db = dbms.get(cls.user, get_query_server_config(name='impala'))
    cls.DATABASE = get_db_prefix(name='impala')

    # Tear down any leftovers, then recreate the database from scratch.
    schema_statements = [
        """ DROP TABLE IF EXISTS %(db)s.tweets; """ % {'db': cls.DATABASE},
        """ DROP DATABASE IF EXISTS %(db)s CASCADE; """ % {'db': cls.DATABASE},
        """ CREATE DATABASE %(db)s; """ % {'db': cls.DATABASE},
    ]
    for statement in schema_statements:
        response = _make_query(cls.client, statement, database='default', local=False, server_name='impala')
        response = wait_for_query_to_finish(cls.client, response, max=180.0)
        payload = json.loads(response.content)
        assert_true(payload['status'] == 0, response.content)

    # Populate the fixture table row by row.
    data_statements = [
        """ CREATE TABLE tweets (row_num INTEGER, id_str STRING, text STRING) STORED AS PARQUET; """,
        """ INSERT INTO TABLE tweets VALUES (1, "531091827395682000", "My dad looks younger than costa"); """,
        """ INSERT INTO TABLE tweets VALUES (2, "531091827781550000", "There is a thin line between your partner being vengeful and you reaping the consequences of your bad actions towards your partner."); """,
        """ INSERT INTO TABLE tweets VALUES (3, "531091827768979000", "@Mustang_Sally83 and they need to get into you :))))"); """,
        """ INSERT INTO TABLE tweets VALUES (4, "531091827114668000", "@RachelZJohnson thank you rach!xxx"); """,
        """ INSERT INTO TABLE tweets VALUES (5, "531091827949309000", "i think @WWERollins was robbed of the IC title match this week on RAW also i wonder if he will get a rematch i hope so @WWE"); """,
    ]
    for statement in data_statements:
        response = _make_query(cls.client, statement, database=cls.DATABASE, local=False, server_name='impala')
        response = wait_for_query_to_finish(cls.client, response, max=180.0)
        payload = json.loads(response.content)
        assert_true(payload['status'] == 0, response.content)
def setUp(self):
    """Point the tests at a real Impala daemon (skipping otherwise) and load
    the 'tweets' fixture into a dedicated database."""
    self.finish = []

    # We need a real Impala cluster currently.
    # Hoisted: os.environ.get was called twice; 'not in' is the idiomatic test.
    impalad_host = os.environ.get('TEST_IMPALAD_HOST')
    if 'impala' not in sys.argv and not impalad_host:
        raise SkipTest
    if impalad_host:
        self.finish.append(SERVER_HOST.set_for_testing(impalad_host))

    self.client = make_logged_in_client()
    self.user = User.objects.get(username='******')
    add_to_group('test')
    self.db = dbms.get(self.user, get_query_server_config(name='impala'))

    hql = """ USE default; DROP TABLE IF EXISTS %(db)s.tweets; DROP DATABASE IF EXISTS %(db)s; CREATE DATABASE %(db)s; USE %(db)s; """ % {'db': self.DATABASE}
    resp = _make_query(self.client, hql, database='default', local=False, server_name='impala')
    resp = wait_for_query_to_finish(self.client, resp, max=30.0)

    hql = """ CREATE TABLE tweets (row_num INTEGER, id_str STRING, text STRING) STORED AS PARQUET; INSERT INTO TABLE tweets VALUES (1, "531091827395682000", "My dad looks younger than costa"); INSERT INTO TABLE tweets VALUES (2, "531091827781550000", "There is a thin line between your partner being vengeful and you reaping the consequences of your bad actions towards your partner."); INSERT INTO TABLE tweets VALUES (3, "531091827768979000", "@Mustang_Sally83 and they need to get into you :))))"); INSERT INTO TABLE tweets VALUES (4, "531091827114668000", "@RachelZJohnson thank you rach!xxx"); INSERT INTO TABLE tweets VALUES (5, "531091827949309000", "i think @WWERollins was robbed of the IC title match this week on RAW also i wonder if he will get a rematch i hope so @WWE"); """
    resp = _make_query(self.client, hql, database=self.DATABASE, local=False, server_name='impala')
    resp = wait_for_query_to_finish(self.client, resp, max=30.0)

def tearDown(self):
    """Undo every config override installed by setUp."""
    for f in self.finish:
        f()
def setup_class(cls):
    """On a live, search-enabled cluster: temporarily elevate the test user to
    superuser to install the search examples, then drop privileges again."""
    if not is_live_cluster() or not search_enabled:
        raise SkipTest

    cls.client = make_logged_in_client(username='******', is_superuser=False)
    cls.user = User.objects.get(username='******')
    add_to_group('test')
    grant_access("test", "test", "libsolr")
    grant_access("test", "test", "search")

    # Install needs superuser; restore the flag before asserting the result.
    cls.user.is_superuser = True
    cls.user.save()
    install_response = cls.client.post(reverse('search:install_examples'))
    payload = json.loads(install_response.content)
    cls.user.is_superuser = False
    cls.user.save()

    assert_equal(payload.get('status'), 0)
def test_login_does_not_reset_groups(self):
    """Login must add the default group only when the user has no groups, and
    must never clobber manually assigned group memberships."""
    client = make_logged_in_client(username=self.test_username, password="******")
    user = User.objects.get(username=self.test_username)
    test_group, _created = Group.objects.get_or_create(name=self.test_username)
    default_group = get_default_user_group()

    user.groups.all().delete()
    assert_false(user.groups.exists())

    # No groups
    response = client.post('/accounts/login/', dict(username=self.test_username, password="******"), follow=True)
    assert_equal(200, response.status_code, "Expected ok status.")
    assert_equal([default_group.name], list(user.groups.values_list('name', flat=True)))

    add_to_group(self.test_username, self.test_username)

    # Two groups
    client.get('/accounts/logout')
    response = client.post('/accounts/login/', dict(username=self.test_username, password="******"), follow=True)
    assert_equal(200, response.status_code, "Expected ok status.")
    assert_equal({default_group.name, test_group.name}, set(user.groups.values_list('name', flat=True)))

    user.groups.filter(name=default_group.name).delete()
    assert_equal({test_group.name}, set(user.groups.values_list('name', flat=True)))

    # Keep manual group only, don't re-add default group
    client.get('/accounts/logout')
    response = client.post('/accounts/login/', dict(username=self.test_username, password="******"), follow=True)
    assert_equal(200, response.status_code, "Expected ok status.")
    assert_equal([test_group.name], list(user.groups.values_list('name', flat=True)))

    user.groups.remove(test_group)
    assert_false(user.groups.exists())

    # Re-add default group
    client.get('/accounts/logout')
    response = client.post('/accounts/login/', dict(username=self.test_username, password="******"), follow=True)
    assert_equal(200, response.status_code, "Expected ok status.")
    assert_equal([default_group.name], list(user.groups.values_list('name', flat=True)))
def setUp(self):
    """
    To clean: creating test1, test2, test3...users
    """
    TestJobBrowserWithHadoop.user_count += 1
    self.username = '******' + str(TestJobBrowserWithHadoop.user_count)
    self.home_dir = '/user/%s' % self.username
    self.cluster.fs.do_as_user(self.username, self.cluster.fs.create_home_dir, self.home_dir)

    self.client = make_logged_in_client(username=self.username, is_superuser=False, groupname='test')
    self.user = User.objects.get(username=self.username)
    grant_access(self.username, 'test', 'jobsub')
    grant_access(self.username, 'test', 'jobbrowser')
    grant_access(self.username, 'test', 'oozie')
    add_to_group(self.username)

    self.prev_user = self.cluster.fs.user
    self.cluster.fs.setuser(self.username)

    self.install_examples()
    self.design = self.create_design()

    # NOTE(review): everything below this raise is unreachable — the test is
    # disabled here; the tail is kept for when it is re-enabled.
    raise SkipTest

    # Run the sleep example, since it doesn't require user home directory
    sleep_design_id = self.design.id
    submit_response = self.client.post(
        reverse('oozie:submit_workflow', args=[sleep_design_id]),
        data={
            u'form-MAX_NUM_FORMS': [u''],
            u'form-INITIAL_FORMS': [u'1'],
            u'form-0-name': [u'REDUCER_SLEEP_TIME'],
            u'form-0-value': [u'1'],
            u'form-TOTAL_FORMS': [u'1'],
        },
        follow=True)
    submitted_job_id = submit_response.context['oozie_workflow'].id
    OozieServerProvider.wait_until_completion(submitted_job_id, timeout=120, step=1)
    self.hadoop_job_id = get_hadoop_job_id(self.oozie, submitted_job_id, 1)
    self.hadoop_job_id_short = views.get_shorter_id(self.hadoop_job_id)
def setup_class(cls):
    """Bring up Oozie, provision a test user with an HDFS home dir, and run the
    sleep example workflow once so tests have a finished Hadoop job to inspect."""
    OozieServerProvider.setup_class()

    cls.username = "******"
    cls.home_dir = "/user/%s" % cls.username
    cls.cluster.fs.do_as_user(cls.username, cls.cluster.fs.create_home_dir, cls.home_dir)

    cls.client = make_logged_in_client(username=cls.username, is_superuser=False, groupname="test")
    cls.user = User.objects.get(username=cls.username)
    grant_access(cls.username, "test", "jobsub")
    grant_access(cls.username, "test", "jobbrowser")
    grant_access(cls.username, "test", "oozie")
    add_to_group(cls.username)

    # Impersonate the test user on HDFS; keep the previous user for teardown.
    cls.prev_user = cls.cluster.fs.user
    cls.cluster.fs.setuser(cls.username)

    cls.install_examples()
    cls.design = cls.create_design()

    # Run the sleep example, since it doesn't require user home directory
    workflow_design_id = cls.design.id
    submit_response = cls.client.post(
        reverse("oozie:submit_workflow", args=[workflow_design_id]),
        data={
            u"form-MAX_NUM_FORMS": [u""],
            u"form-INITIAL_FORMS": [u"1"],
            u"form-0-name": [u"REDUCER_SLEEP_TIME"],
            u"form-0-value": [u"1"],
            u"form-TOTAL_FORMS": [u"1"],
        },
        follow=True,
    )
    submitted_job_id = submit_response.context["oozie_workflow"].id
    OozieServerProvider.wait_until_completion(submitted_job_id)

    cls.hadoop_job_id = get_hadoop_job_id(cls.oozie, submitted_job_id, 1)
    cls.hadoop_job_id_short = views.get_shorter_id(cls.hadoop_job_id)
def setUp(self):
    """Create a non-admin client with indexer app access."""
    self.c = make_logged_in_client(is_superuser=False)
    grant_access("test", "test", "indexer")
    add_to_group("test")