class TestDelete(unittest.TestCase):
    """Integration tests for MAQL DELETE statement generation.

    Uses a fresh project per test; `assertEqual` replaces the deprecated
    `assertEquals` alias throughout.
    """

    def setUp(self):
        # Drop leftovers from previous runs, then create a pristine project.
        self.connection = Connection(username, password)
        delete_projects_by_name(self.connection, test_project_name)
        self.project = Project(self.connection).create(test_project_name, gd_token)

    def tearDown(self):
        self.project.delete()

    def test_column_maql_delete(self):
        dataset = Department(self.project)
        dataset.create()

        # Explicit WHERE clause against the default (main) attribute.
        expected_maql_delete_department_row = "DELETE FROM {attr.department.department}"\
            + " WHERE {label.department.city} IN (\"Boston\", \"NYC\");"
        self.assertEqual(
            expected_maql_delete_department_row,
            dataset.get_maql_delete(where_clause="{label.department.city} IN (\"Boston\", \"NYC\")")
        )

        # where_values alone should target the dataset's main label.
        expected_maql_delete_department_row_with_ids = "DELETE FROM {attr.department.department}"\
            + " WHERE {label.department.department} IN (\"d1\", \"d2\");"
        self.assertEqual(
            expected_maql_delete_department_row_with_ids,
            dataset.get_maql_delete(where_values=["d1", "d2"])
        )
        # Passing the main column explicitly must give the same statement.
        self.assertEqual(
            expected_maql_delete_department_row_with_ids,
            dataset.get_maql_delete(column=dataset.department, where_values=["d1", "d2"])
        )

        # Explicit secondary column, once with a clause and once with values.
        expected_maql_delete_city_row = "DELETE FROM {attr.department.city}"\
            + " WHERE {label.department.city} IN (\"Boston\", \"NYC\");"
        self.assertEqual(
            expected_maql_delete_city_row,
            dataset.get_maql_delete(
                column=dataset.city,
                where_clause="{label.department.city} IN (\"Boston\", \"NYC\")"
            )
        )
        self.assertEqual(
            expected_maql_delete_city_row,
            dataset.get_maql_delete(
                column=dataset.city,
                where_values=["Boston", "NYC"]
            )
        )

        # No filter at all must be rejected.
        self.assertRaises(RowDeletionError, dataset.get_maql_delete)

        # Non-string values must be rendered without quotes.
        expected_maql_delete_row_with_id = "DELETE FROM {attr.department.city}"\
            + " WHERE {label.department.city} IN (119, 120);"
        self.assertEqual(
            expected_maql_delete_row_with_id,
            dataset.get_maql_delete(
                column=dataset.city,
                where_values=[119, 120]
            )
        )
def test_create_and_delete_project(self):
    """Create a project, delete it, then verify double-delete and reload fail."""
    project = Project(self.connection).create(TEST_PROJECT_NAME)
    # assertIsNotNone replaces the deprecated assert_ alias.
    self.assertIsNotNone(project)
    self.assertIsNotNone(project.id)
    project.delete()
    # Deleting twice and loading a deleted project must both raise.
    self.assertRaises(ProjectNotOpenedError, project.delete)
    self.assertRaises(ProjectNotFoundError, project.load, name=TEST_PROJECT_NAME)
def test_create_and_delete_project(self):
    """Token-based variant: create, delete, then verify stale handles fail."""
    project = Project(self.connection).create(test_project_name, gd_token)
    # assertIsNotNone replaces the deprecated assert_ alias.
    self.assertIsNotNone(project)
    self.assertIsNotNone(project.id)
    project.delete()
    # Deleting twice and loading a deleted project must both raise.
    self.assertRaises(ProjectNotOpenedError, project.delete)
    self.assertRaises(ProjectNotFoundError, project.load, name=test_project_name)
def test_create_structure(self):
    """Every example dataset is absent before create() and present after."""
    project = Project(self.connection).create(TEST_PROJECT_NAME)
    # Malformed MAQL must be rejected by the API.
    self.assertRaises(MaqlExecutionFailed, project.execute_maql, 'CREATE DATASET {dat')
    for (example, ExampleDataset) in examples.examples:
        dataset = ExampleDataset(project)
        self.assertRaises(DataSetNotFoundError, dataset.get_metadata, name=dataset.schema_name)
        dataset.create()
        # assertTrue replaces the deprecated assert_ alias.
        self.assertTrue(dataset.get_metadata(name=dataset.schema_name))
    project.delete()
class TestDashboard(unittest.TestCase):
    """Integration tests for dashboard execution contexts and PDF export."""

    # Shared fixtures: a floating date constraint filter, a page wildcard
    # filter, and the path the exported PDF is written to.
    common_filters = [{"object_id": 126, "constraint": {"type": "floating", "from": -3, "to": -1}}]
    wildcard_filter = {
        'attribute': 'label.page.page_name',
        'value': 'fake_page'
    }
    output_path = './fake_page.pdf'

    def setUp(self):
        self.connection = Connection(username, password)
        self.project = Project(self.connection)
        self.project.load(test_dashboard_project_id)
        self.dashboard = Dashboard(
            self.project, user_id, test_dashboard_id,
            test_tab_id, test_dashboard_name
        )

    def test_get_execution_context(self):
        expected_answer = '/gdc/projects/%(project_id)s/users/%(user_id)s/executioncontexts/' % {
            'project_id': test_dashboard_project_id,
            'user_id': user_id
        }
        self.dashboard._get_execution_context(self.common_filters)
        self.assertIn(expected_answer, self.dashboard.execution_context_response_uri)
        # A filter with an unexpected key must be rejected.
        common_filters = [{"wrong_object_id": 126, "constraint": {"type": "floating", "from": -3, "to": -1}}]
        self.assertRaises(DashboardExportError, self.dashboard._get_execution_context, common_filters)

    def test_get_client_export(self):
        expected_answer = '/gdc/projects/%(project_id)s/clientexport/' % {
            'project_id': test_dashboard_project_id
        }
        self.dashboard._get_client_export(self.common_filters, self.wildcard_filter)
        self.assertIn(expected_answer, self.dashboard.client_export_response_uri)

    def test_poll_for_dashboard_data(self):
        self.dashboard._poll_for_dashboard_data(self.common_filters, self.wildcard_filter)
        self.assertIsNotNone(self.dashboard.pdf_data)

    def test_save_as_pdf(self):
        self.dashboard.save_as_pdf(self.common_filters, self.wildcard_filter, self.output_path)
        try:
            os.remove(self.output_path)
        except OSError:
            # Narrowed from a bare "except:" — only a missing/unremovable
            # file means the export failed; other exceptions should surface.
            self.fail('pdf should be found')

    def test_saved_dashboard_is_empty(self):
        self.dashboard.save_as_pdf(self.common_filters, self.wildcard_filter, self.output_path)
        self.dashboard.EMPTY_SIZE = 10  # fake pdf size
        self.assertFalse(self.dashboard.saved_dashboard_is_empty(self.output_path))
        self.dashboard.EMPTY_SIZE = 13109  # real pdf size
        self.assertTrue(self.dashboard.saved_dashboard_is_empty(self.output_path))
        os.remove(self.output_path)
def test_execute_dml(self):
    """execute_dml rejects invalid statements and accepts a valid DELETE."""
    project = Project(self.connection).create(test_project_name, gd_token)
    self.assertRaises(DMLExecutionFailed, project.execute_dml, 'DELETE _%$;')
    self.assertRaises(DMLExecutionFailed, project.execute_dml, '')
    dataset = examples.examples[0][1](project)
    dataset.upload()
    try:
        project.execute_dml('DELETE FROM {attr.department.department};')
    except DMLExecutionFailed as e:
        # "except X as e" works on Python 2.6+ and 3.x, unlike the
        # Python-2-only "except X, e" form this replaces.
        self.fail('project.execute_dml: unexpected exception: %s' % e)
def test_xml_schema(self):
    """Generated XML schema has the same node structure as the reference."""
    for (example, ExampleDataset) in examples.examples:
        reference = parseString(
            example.schema_xml.replace(' ', '').replace('\n', ''))
        generated = parseString(get_xml_schema(ExampleDataset(Project(None))))
        # TODO: test for XML content (not so easily comparable)
        self.assertEqual(len(reference.childNodes), len(generated.childNodes))
        ref_root = reference.childNodes[0]
        gen_root = generated.childNodes[0]
        self.assertEqual(
            len(ref_root.childNodes),
            len(gen_root.childNodes),
            '%s != %s' % (', '.join(n.nodeName for n in ref_root.childNodes),
                          ', '.join(n.nodeName for n in gen_root.childNodes)))
        self.assertEqual(
            len(ref_root.childNodes[1].childNodes),
            len(gen_root.childNodes[1].childNodes),
            '%s != %s (%s)' % (
                ', '.join(n.nodeName for n in ref_root.childNodes[1].childNodes),
                ', '.join(n.nodeName for n in gen_root.childNodes[1].childNodes),
                example))
def setUp(self):
    """Open the shared dashboard project and build the Dashboard under test."""
    connection = Connection(username, password)
    project = Project(connection)
    project.load(test_dashboard_project_id)
    self.connection = connection
    self.project = project
    self.dashboard = Dashboard(project, user_id, test_dashboard_id,
                               test_tab_id, test_dashboard_name)
def test_dataset_maql(self):
    """get_maql() output matches the reference MAQL for every example dataset."""
    for (example, ExampleDataset) in examples.examples:
        dataset = ExampleDataset(Project(None))
        maql_generated = dataset.get_maql()
        # Unified diff as the failure message makes mismatches readable.
        diff = '\n'.join(
            difflib.unified_diff(maql_generated.splitlines(),
                                 example.maql.splitlines(), lineterm=''))
        # assertEqual replaces the deprecated assertEquals alias.
        self.assertEqual(example.maql, maql_generated, diff)
class TestDataset(unittest.TestCase):
    """Integration tests for dataset creation, upload and SLI manifests.

    Deprecated assert_/assertEquals aliases are replaced with
    assertTrue/assertEqual.
    """

    def setUp(self):
        self.connection = Connection(username, password, debug=0)
        # drop all the test projects:
        delete_projects_by_name(self.connection, TEST_PROJECT_NAME)
        self.project = Project(self.connection).create(TEST_PROJECT_NAME)

    def tearDown(self):
        self.project.delete()

    def test_create_date_dimension(self):
        for (example, ExampleDataset) in examples.examples:
            dataset = ExampleDataset(self.project)
            date_dimension = dataset.get_date_dimension()
            if date_dimension:
                DateDimension(self.project).create(name=date_dimension.schemaReference,
                                                   include_time=date_dimension.datetime)
        # TODO: verify the creation

    def test_upload_dataset(self):
        for (example, ExampleDataset) in examples.examples:
            dataset = ExampleDataset(self.project)
            dataset.upload()
            dataset_metadata = dataset.get_metadata(name=dataset.schema_name)
            self.assertTrue(dataset_metadata['dataUploads'])
            self.assertEqual('OK', dataset_metadata['lastUpload']['dataUploadShort']['status'])
            # Second upload should succeed on an existing dataset.
            dataset.upload()
        # TODO: check different data for the upload

    def test_date_maql(self):
        date_dimension = DateDimension(self.project)
        self.assertEqual('INCLUDE TEMPLATE "URN:GOODDATA:DATE"', date_dimension.get_maql())
        self.assertEqual('INCLUDE TEMPLATE "URN:GOODDATA:DATE" MODIFY (IDENTIFIER "test", TITLE "Test");\n\n',
                         date_dimension.get_maql('Test'))
        self.assertEqual(examples.forex.date_dimension_maql,
                         date_dimension.get_maql('Forex', include_time=True))
        self.assertEqual(examples.forex.date_dimension_maql.replace('forex', 'xerof').replace('Forex', 'Xerof'),
                         date_dimension.get_maql('Xerof', include_time=True))

    def test_sli_manifest(self):
        for (example, ExampleDataset) in examples.examples:
            dataset = ExampleDataset(Project(None))
            sli_manifest = dataset.get_sli_manifest()
            self.assertEqual(example.sli_manifest, sli_manifest)
class TestReport(unittest.TestCase):
    """Integration tests for report execution, export and saving."""

    def setUp(self):
        self.connection = Connection(username, password)
        self.project = Project(self.connection)
        self.project.load(report_project_id)

    def test_exec_report(self):
        report = Report(self.project, test_report_id)
        report.execute_report()
        self.assertTrue(report.exec_result)

    def test_export_report(self):
        report = Report(self.project, test_report_id)
        report.export_report()
        self.assertTrue(report.export_download_uri)
        # A corrupted exec_result must make the export fail.
        report.exec_result = 'fake'
        self.assertRaises(ReportExportFailed, report.export_report)

    def test_get_report(self):
        report = Report(self.project, test_report_id)
        report.get_report()
        self.assertTrue(report.report_content)
        # Content should be the exported payload, not a raw JSON envelope.
        self.assertFalse(report.report_content[0] == '{')

    def test_get_empty_report(self):
        report = Report(self.project, test_empty_report_id)
        report.get_report()
        # assertEqual replaces the deprecated assertEquals alias.
        self.assertEqual(report.report_content, '')

    def test_save_report(self):
        report = Report(self.project, test_report_id)
        file_path = './test_report.txt'
        report.save_report(file_path)
        try:
            with open(file_path):
                pass
        except IOError:
            # A message on fail() makes the broken-save case diagnosable.
            self.fail('report file should have been written')
        os.remove(file_path)
def handle(self, **options):
    '''GoodData upload command.

    Discovers dataset classes, optionally restricts them to the
    comma-separated --datasets option, then uploads each dataset into its
    project (projects are loaded once and cached by name).
    '''
    datasets_opt = options.get('datasets')
    use_datasets = datasets_opt.split(',') if datasets_opt else None

    gd_classes = discover_gooddata_modules()
    if use_datasets:
        # Build a filtered dict instead of deleting keys while iterating
        # gd_classes.keys() — mutating a dict during iteration raises
        # RuntimeError on Python 3 and is fragile in general.
        gd_classes = dict(
            (name, cls) for name, cls in gd_classes.items()
            if name in use_datasets
        )

    connection = Connection(settings.GOODDATA_USERNAME, settings.GOODDATA_PASSWORD)

    # Cache Project handles so each project is loaded only once.
    projects = {}
    for dataset_class in gd_classes.values():
        if not dataset_class.project_name:
            raise AttributeError('project_name not defined in %s' % dataset_class)
        if dataset_class.project_name not in projects:
            projects[dataset_class.project_name] = Project(connection).load(name=dataset_class.project_name)
        dataset = dataset_class(projects[dataset_class.project_name])
        dataset.upload()
from gooddataclient.connection import Connection from gooddataclient.project import Project from datasets.quotes import Quotes connection = Connection('email', 'password') proj_name = 'quotesFinal9242012' print '\n\n\nLoading Project: ', proj_name project = Project(connection).create(name=proj_name) project = Project(connection).load(name=proj_name) dataset = Quotes(project) dataset.upload()
def setUp(self):
    """Create a pristine test project; stale ones are removed first."""
    conn = Connection(username, password, debug=0)
    # Remove every leftover project carrying the test name before creating.
    delete_projects_by_name(conn, TEST_PROJECT_NAME)
    self.connection = conn
    self.project = Project(conn).create(TEST_PROJECT_NAME)
class TestState(unittest.TestCase):
    """Integration tests for dataset introspection: has_* predicates,
    remote column retrieval and remote diff computation.

    NOTE(review): test_remote_diff and test_remote_diff_factsof mutate
    shared *class* attributes of the example dataset classes (e.g.
    Department.name, Worker.department, Snapshot.real_snapshot_id) and do
    not restore them — this leaks state across tests if the classes are
    reused; verify test isolation before reordering tests.
    """

    def setUp(self):
        # Fresh project per test: drop leftovers, then create anew.
        self.connection = Connection(username, password)
        delete_projects_by_name(self.connection, test_project_name)
        self.project = Project(self.connection).create(test_project_name, gd_token)

    def tearDown(self):
        self.project.delete()

    def test_has_properties(self):
        """has_attribute/has_fact/has_label/has_reference/has_date reflect
        the created schema (and reject names that were never defined)."""
        # examples.examples is ordered: [0]=Department, [1]=Worker, [2]=Salary
        # (presumably — TODO confirm against the examples module).
        department = examples.examples[0][1](self.project)
        department.create()
        worker = examples.examples[1][1](self.project)
        worker.create()
        salary = examples.examples[2][1](self.project)
        salary.create()
        # attributes
        self.assertTrue(department.has_attribute('department'))
        self.assertTrue(department.has_attribute('city'))
        self.assertFalse(department.has_attribute('town'))
        # facts
        self.assertTrue(salary.has_fact('payment'))
        self.assertFalse(worker.has_fact('name'))
        self.assertFalse(worker.has_fact('age'))
        # labels
        self.assertTrue(department.has_label('name'))
        self.assertFalse(department.has_label('city'))
        self.assertFalse(department.has_label('building'))
        # references
        self.assertTrue(worker.has_reference('department'))
        self.assertFalse(worker.has_reference('jungle'))
        self.assertTrue(salary.has_reference('worker'))
        # dates
        self.assertTrue(salary.has_date('payday'))
        self.assertFalse(salary.has_date('expires_at'))
        self.assertFalse(department.has_date('birthday'))

    def test_remote_columns(self):
        """Columns fetched from the API match the locally declared ones."""
        # First pass: create every example dataset remotely.
        for (example, ExampleDataset) in examples.examples:
            dataset = ExampleDataset(self.project)
            dataset.create()
        # Second pass: compare remote columns against local declarations.
        for (example, ExampleDataset) in examples.examples:
            dataset = ExampleDataset(self.project)
            columns = dataset.get_remote_columns()
            self.assertListEqual(sorted(columns), sorted(dict(dataset._columns).keys()))
            for col_name, col in columns.iteritems():
                dataset_col = getattr(dataset, col_name)
                self.assertIs(type(col), type(dataset_col))
                # references have no title on the API...
                if not isinstance(col, Reference):
                    self.assertEqual(col.title, dataset_col.title)
                self.assertEqual(col.dataType, dataset_col.dataType)
                self.assertEqual(col.reference, dataset_col.reference)
                self.assertEqual(col.schemaReference, dataset_col.schemaReference)
                self.assertEqual(col.datetime, dataset_col.datetime)

    def test_remote_diff(self):
        """get_remote_diff() classifies columns as added/altered/deleted
        after the local class definitions are mutated."""
        Department = examples.examples[0][1]
        Department(self.project).create()
        Worker = examples.examples[1][1]
        Worker(self.project).create()
        Salary = examples.examples[2][1]
        Salary(self.project).create()
        # Mutate Department locally: alter 'name', delete 'city', add 'town'.
        # NOTE(review): these are class-level mutations, not restored afterwards.
        old_city = Department.city
        old_name = Department.name
        Department.name = HyperLink(title='Name', reference='department', folder='Department', dataType='VARCHAR(128)')
        Department.city = None
        Department.town = Attribute(title='Town', folder='Department', dataType='VARCHAR(20)')
        remote_diff = Department(self.project).get_remote_diff()
        self.assertIn('town', remote_diff['added'])
        self.assertIn('name', remote_diff['altered'])
        self.assertIn('city', remote_diff['deleted'])
        self.assertEqual(remote_diff['added']['town'], Department.town)
        self.assertEqual(remote_diff['altered']['name']['new'], Department.name)
        self.assertEqual(remote_diff['altered']['name']['old'], old_name)
        self.assertEqual(remote_diff['deleted']['city'], old_city)
        # Deleting a reference column shows up only under 'deleted'.
        old_dpt = Worker.department
        Worker.department = None
        remote_diff = Worker(self.project).get_remote_diff()
        self.assertIn('department', remote_diff['deleted'])
        self.assertFalse(remote_diff['added'])
        self.assertFalse(remote_diff['altered'])
        self.assertEqual(remote_diff['deleted']['department'], old_dpt)
        # Changing a fact's dataType shows up only under 'altered'.
        Salary.payment = Fact(title='Payment', folder='Salary', dataType='BIGINT')
        remote_diff = Salary(self.project).get_remote_diff()
        self.assertIn('payment', remote_diff['altered'])
        self.assertFalse(remote_diff['added'])
        self.assertFalse(remote_diff['deleted'])

    def test_remote_diff_factsof(self):
        """The implicit 'factsof' column is only reported as deleted once a
        ConnectionPoint is added locally."""
        class Snapshot(Dataset):
            wrong_snapshot_id = Attribute(title='snapshot_id', dataType='VARCHAR(20)')
        Snapshot(self.project).create()
        remote_diff = Snapshot(self.project).get_remote_diff()
        self.assertTrue('factsof' not in remote_diff['deleted'])
        # Adding a ConnectionPoint locally makes the remote 'factsof' obsolete.
        Snapshot.real_snapshot_id = ConnectionPoint(title='snapshot_id')
        remote_diff = Snapshot(self.project).get_remote_diff()
        self.assertTrue('factsof' in remote_diff['deleted'])
def setUp(self):
    """Attach to the pre-existing report project."""
    self.connection = Connection(username, password)
    project = Project(self.connection)
    project.load(report_project_id)
    self.project = project
def setUp(self):
    """Recreate the token-based test project from scratch for each test."""
    conn = Connection(username, password)
    # Drop leftovers from earlier runs so create() starts clean.
    delete_projects_by_name(conn, test_project_name)
    self.connection = conn
    self.project = Project(conn).create(test_project_name, gd_token)
class TestDataset(unittest.TestCase):
    """Integration tests for dataset create/upload and SLI manifest generation.

    Deprecated assertEquals aliases are replaced with assertEqual.
    """

    def setUp(self):
        # Fresh project per test: drop leftovers, then create anew.
        self.connection = Connection(username, password)
        delete_projects_by_name(self.connection, test_project_name)
        self.project = Project(self.connection).create(test_project_name, gd_token)

    def tearDown(self):
        self.project.delete()

    def test_create_dataset(self):
        for (example, ExampleDataset) in examples.examples:
            dataset = ExampleDataset(self.project)
            dataset.create()
            dataset.get_metadata(dataset.schema_name)

    def test_upload_dataset(self):
        for (example, ExampleDataset) in examples.examples:
            dataset = ExampleDataset(self.project)
            dataset.upload()
            dataset_metadata = dataset.get_metadata(name=dataset.schema_name)
            self.assertTrue(dataset_metadata['dataUploads'])
            self.assertEqual('OK', dataset_metadata['lastUpload']['dataUploadShort']['status'])

    def test_date_dimension(self):
        date_dimension = DateDimension(self.project)
        self.assertEqual('INCLUDE TEMPLATE "URN:GOODDATA:DATE"', date_dimension.get_maql())
        self.assertEqual('INCLUDE TEMPLATE "URN:GOODDATA:DATE" MODIFY (IDENTIFIER "test", TITLE "Test");\n\n',
                         date_dimension.get_maql('Test'))
        self.assertEqual(examples.forex.date_dimension_maql,
                         date_dimension.get_maql('Forex', include_time=True))
        self.assertEqual(examples.forex.date_dimension_maql.replace('forex', 'xerof').replace('Forex', 'Xerof'),
                         date_dimension.get_maql('Xerof', include_time=True))

    def test_sli_manifest(self):
        for (example, ExampleDataset) in examples.examples:
            dataset = ExampleDataset(self.project)
            sli_manifest = dataset.get_sli_manifest()
            self.assertIsInstance(sli_manifest, dict)
            # Default is an incremental upload; full_upload=True switches to FULL.
            self.assertEqual('INCREMENTAL', sli_manifest['dataSetSLIManifest']['parts'][0]['mode'])
            sli_manifest = dataset.get_sli_manifest(full_upload=True)
            self.assertEqual('FULL', sli_manifest['dataSetSLIManifest']['parts'][0]['mode'])

    def test_dates_sli_manifest(self):
        # A datetime column carries a full date-time format constraint...
        _datetime = Date(title='Created at', schemaReference='created_at', datetime=True)
        _datetime.set_name_and_schema('_name', '_schema')
        self.assertEqual(
            'yyyy-MM-dd HH:mm:SS',
            _datetime.get_sli_manifest_part(full_upload=False)[0]['constraints']['date']
        )
        # ...while a plain date column carries only the date format.
        _date = Date(title='Created at', schemaReference='created_at', datetime=False)
        _date.set_name_and_schema('_name', '_schema')
        self.assertEqual(
            'yyyy-MM-dd',
            _date.get_sli_manifest_part(full_upload=False)[0]['constraints']['date']
        )

    def test_no_upload(self):
        '''
        test that no connection to GD API is made.
        Uses a mock connection that will raise an error if put is called
        '''
        csv_file = os.path.join(os.path.abspath('./'), 'tmp.csv')

        def mock_put(uri, data, headers):
            raise Exception('GD API should not be called')

        self.project.connection.webdav.put = mock_put
        for (example, ExampleDataset) in examples.examples:
            dataset = ExampleDataset(self.project)
            dataset.upload(
                no_upload=True,
                keep_csv=True,
                csv_file=csv_file
            )
        os.remove(csv_file)

    def test_exceptions(self):
        class DummyDataset(Dataset):
            pass
        dataset = DummyDataset(self.project)
        self.assertRaises(DataSetNotFoundError, dataset.get_metadata, 'dummy_dataset')
        self.assertRaises(NotImplementedError, dataset.data)

    def test_upload_dataset_with_csv(self):
        department, Department = examples.examples[0]
        self.dataset = Department(self.project)
        self.dataset.create()
        csv_input_path = 'tests/examples/department_data.csv'
        self.dataset.upload(csv_input_path=csv_input_path)
        dataset_metadata = self.dataset.get_metadata(name=self.dataset.schema_name)
        self.assertTrue(dataset_metadata['dataUploads'])
        self.assertEqual('OK', dataset_metadata['lastUpload']['dataUploadShort']['status'])
def test_validate_maql(self):
    """Malformed and empty MAQL statements are both rejected."""
    project = Project(self.connection).create(test_project_name, gd_token)
    # Truncated statement fails validation; empty input raises AttributeError.
    with self.assertRaises(MaqlValidationFailed):
        project.execute_maql('CREATE DATASET {dat')
    with self.assertRaises(AttributeError):
        project.execute_maql('')
    project.delete()