def test_main(self):
    with tests.capture_stdout() as mock_stdout:
        main(self.project_id)

    stdout = mock_stdout.getvalue()

    self.assertRegexpMatches(stdout, re.compile(
        r'Query Results:.hamlet', re.DOTALL))

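# All of these tests lean on a capture_stdout() helper from the shared
# "tests" package. Its source is not shown here; the sketch below is a
# hypothetical stand-in that simply swaps sys.stdout for an in-memory
# buffer, which is all the assertions in this file require.
import contextlib
import io
import sys


@contextlib.contextmanager
def capture_stdout():
    """Yield a StringIO buffer that temporarily replaces sys.stdout."""
    buffer = io.StringIO()
    original, sys.stdout = sys.stdout, buffer
    try:
        yield buffer
    finally:
        sys.stdout = original
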
def test_main(self):
    with tests.capture_stdout() as stdout:
        list_logs.main(self.project_id)

    output = stdout.getvalue().strip()
    self.assertRegexpMatches(output, re.compile(r'.*', re.S))

def test_main(self):
    with tests.capture_stdout() as mock_stdout:
        getting_started.main(self.constants['projectId'])

    stdout = mock_stdout.getvalue()

    self.assertRegexpMatches(stdout, re.compile(
        r'Query Results:.hamlet', re.DOTALL))

def test_main(self):
    with tests.capture_stdout():
        main(
            self.project_id,
            self.bucket_name,
            'us-central1-f',
            'test-instance',
            wait=False)

def test_main(self):
    with tests.capture_stdout() as stdout:
        auth.main(self.test_project_id)

    output = stdout.getvalue().strip()
    self.assertRegexpMatches(
        output, re.compile(
            r'Timeseries.list raw response:\s*'
            r'{\s*"kind": "[^"]+",'
            r'\s*"oldest": *"[0-9]+', re.S))

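# assertRegexpMatches is the legacy unittest name for this assertion;
# Python 3.2 renamed it to assertRegex and Python 3.12 removed the old
# alias entirely. If the suite has to run unchanged on a modern
# interpreter, a small shim such as this one (an assumption, not part
# of the original tests) restores the alias.
import unittest

if not hasattr(unittest.TestCase, 'assertRegexpMatches'):
    unittest.TestCase.assertRegexpMatches = unittest.TestCase.assertRegex
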
def test_sync_query(self):
    with capture_stdout() as stdout:
        main(
            self.constants['projectId'],
            self.constants['query'],
            30,
            5)

    result = stdout.getvalue().split('\n')[0]
    self.assertIsNotNone(json.loads(result))

def test_main(self):
    with tests.capture_stdout() as mock_stdout:
        main(self.project_id)

    stdout = mock_stdout.getvalue()

    self.assertRegexpMatches(stdout, re.compile(
        r'Project list:.*bigquery#projectList.*projects', re.DOTALL))
    self.assertRegexpMatches(stdout, re.compile(
        r'Dataset list:.*datasets.*datasetId', re.DOTALL))

def test_main(self):
    with tests.capture_stdout() as mock_stdout:
        list_datasets_projects.main(self.constants['projectId'])

    stdout = mock_stdout.getvalue()

    self.assertRegexpMatches(stdout, re.compile(
        r'Project list:.*bigquery#projectList.*projects', re.DOTALL))
    self.assertRegexpMatches(stdout, re.compile(
        r'Dataset list:.*datasets.*datasetId', re.DOTALL))

def test_load_csv_data(self):
    schema_path = os.path.join(self.resource_path, 'schema.json')
    data_path = os.path.join(self.resource_path, 'data.csv')

    with tests.capture_stdout() as mock_stdout:
        load_data(
            schema_path,
            data_path,
            self.project_id,
            self.dataset_id,
            self.table_id)

    stdout = mock_stdout.getvalue()

    self.assertRegexpMatches(
        stdout, re.compile(
            r'Waiting for job to finish.*Job complete.', re.DOTALL))

def test_sync_query(self):
    query = (
        'SELECT corpus FROM publicdata:samples.shakespeare '
        'GROUP BY corpus;')

    with capture_stdout() as stdout:
        main(
            project_id=self.project_id,
            query=query,
            timeout=30,
            num_retries=5)

    result = stdout.getvalue().split('\n')[0]
    self.assertIsNotNone(json.loads(result))

def test_async_query(self):
    with tests.capture_stdout() as stdout:
        main(
            self.constants['projectId'],
            self.constants['query'],
            False,
            5,
            5)

    value = stdout.getvalue().strip().split('\n').pop()
    self.assertIsNotNone(json.loads(value))

def test_main(self):
    with tests.capture_stdout() as mock_stdout:
        main(
            self.project_id,
            self.bucket_name,
            'us-central1-f',
            'test-instance',
            wait=False)

    stdout = mock_stdout.getvalue()

    expected_output = re.compile(
        (r'Instances in project %s and zone us-central1-.* - test-instance'
         r'.*Deleting instance.*done..$') % self.project_id,
        re.DOTALL)
    self.assertRegexpMatches(stdout, expected_output)

def test_async_query(self):
    query = (
        'SELECT corpus FROM publicdata:samples.shakespeare '
        'GROUP BY corpus;')

    with tests.capture_stdout() as stdout:
        main(
            project_id=self.project_id,
            query_string=query,
            batch=False,
            num_retries=5,
            interval=1)

    value = stdout.getvalue().strip().split('\n').pop()
    self.assertIsNotNone(json.loads(value))

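# The query tests above only pass if the sample's main() writes result
# rows to stdout with one JSON document per line, since they take a
# single line of the captured output and feed it to json.loads(). A
# minimal sketch of that printing convention (illustrative only; the
# real samples may format their rows differently):
import json


def print_rows(rows):
    # One JSON object per line lets a caller parse any single line.
    for row in rows:
        print(json.dumps(row))
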
def test_stream_row_to_bigquery(self):
    with open(
            os.path.join(self.resource_path, 'streamrows.json'),
            'r') as rows_file:
        rows = json.load(rows_file)

    streaming.get_rows = lambda: rows

    with capture_stdout() as stdout:
        streaming.main(
            self.constants['projectId'],
            self.constants['datasetId'],
            self.constants['newTableId'],
            5)

    results = stdout.getvalue().split('\n')
    self.assertIsNotNone(json.loads(results[0]))

def test_stream_row_to_bigquery(self):
    with open(
            os.path.join(self.resource_path, 'streamrows.json'),
            'r') as rows_file:
        rows = json.load(rows_file)

    streaming.get_rows = lambda: rows

    with capture_stdout() as stdout:
        streaming.main(
            self.project_id,
            self.dataset_id,
            self.table_id,
            num_retries=5)

    results = stdout.getvalue().split('\n')
    self.assertIsNotNone(json.loads(results[0]))

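# The two streaming tests above overwrite streaming.get_rows at module
# level and never restore it, so the stub can leak into later tests. A
# hedged alternative, assuming unittest.mock (or the "mock" backport on
# Python 2) is available, scopes the patch to the test:
try:
    from unittest import mock
except ImportError:
    import mock


def test_stream_row_to_bigquery(self):
    with open(
            os.path.join(self.resource_path, 'streamrows.json'),
            'r') as rows_file:
        rows = json.load(rows_file)

    # The patch is undone automatically when the with-block exits.
    with mock.patch.object(streaming, 'get_rows', lambda: rows):
        with capture_stdout() as stdout:
            streaming.main(
                self.project_id,
                self.dataset_id,
                self.table_id,
                num_retries=5)

    results = stdout.getvalue().split('\n')
    self.assertIsNotNone(json.loads(results[0]))
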
def test_version(self):
    with capture_stdout() as out:
        run(CmdArguments(command='version'))
    self.assertEqual(out.getvalue(), yozuch_version + '\n')