def test_parent_path_to_list():
    """_parse_project_path_to_list: None → [], "" → [""], and '/' splits levels."""
    cases = [
        (None, []),
        ("", [""]),
        ("parent", ["parent"]),
        ("parent/child", ["parent", "child"]),
    ]
    for path, expected in cases:
        assert Server._parse_project_path_to_list(path) == expected
def run_command(args):
    """Delete a project by name, resolving it through its parent path if given.

    Exits the process via Errors.exit_with_error on any server error.
    """
    logger = log(__class__.__name__, args.logging_level)
    logger.debug(_("tabcmd.launching"))
    session = Session()
    server = session.create_session(args)
    if args.parent_project_path:
        logger.debug("parent path: {}".format(args.parent_project_path))
    try:
        logger.debug(
            _("deleteproject.status").format(args.parent_project_path,
                                             args.project_name))
        project = Server.get_project_by_name_and_parent_path(
            logger, server, args.project_name, args.parent_project_path)
    except TSC.ServerResponseError as e:
        Errors.exit_with_error(
            logger, _("publish.errors.unexpected_server_response"), e)
    project_id = project.id
    try:
        logger.info(_("deleteproject.status").format(args.project_name))
        server.projects.delete(project_id)
        logger.info(_("common.output.succeeded"))
    except TSC.ServerResponseError as e:
        # Fix: the message key was passed raw; wrap it in _() so it is
        # translated like every other user-facing message in this file.
        Errors.exit_with_error(
            logger, _("tabcmd.result.failure.delete.project"), e)
def run_command(args):
    """Publish the Tableau sample workbooks into the requested project."""
    logger = log(__class__.__name__, args.logging_level)
    logger.debug(_("tabcmd.launching"))
    session = Session()
    server = session.create_session(args)

    # Resolve the parent container first (name=None: look up by path only);
    # leave it as None when no parent path was supplied.
    parent = None
    if args.parent_project_path is not None:
        parent = Server.get_project_by_name_and_parent_path(
            logger, server, None, args.parent_project_path)

    try:
        project = PublishSamplesCommand.get_project_by_name_and_parent_path(
            logger, server, args.project_name, parent)
    except Exception as e:
        Errors.exit_with_error(
            logger,
            _("tabcmd.report.error.publish_samples.expected_project"),
            exception=e)

    try:
        # samples=True asks the server to populate the project with samples.
        server.projects.update(project, samples=True)
    except Exception as e:
        Errors.exit_with_error(
            logger, _("tabcmd.result.failure.publish_samples"), exception=e)
def run_command(args):
    """Queue extract deletion for either a datasource or a workbook."""
    logger = log(__class__.__name__, args.logging_level)
    logger.debug(_("tabcmd.launching"))
    session = Session()
    server = session.create_session(args)
    try:
        if args.datasource:
            logger.info(
                _("deleteextracts.for.datasource").format(args.datasource))
            item = Server.get_data_source_item(logger, server, args.datasource)
            job = server.datasources.delete_extract(item)
        elif args.workbook:
            logger.info(
                _("deleteextracts.for.workbook_name").format(args.workbook))
            item = Server.get_workbook_item(logger, server, args.workbook)
            job = server.workbooks.delete_extract(item)
    except TSC.ServerResponseError as e:
        Errors.exit_with_error(logger, _("deleteextracts.errors.error"), e)
    logger.info(_("common.output.job_queued_success"))
    logger.debug("Extract deletion queued with JobID: {}".format(job.id))
def run_command(args):
    """Delete a group by name.

    Finds the group id first, then deletes it; exits on any server error.
    """
    logger = log(__class__.__name__, args.logging_level)
    logger.debug(_("tabcmd.launching"))
    session = Session()
    server = session.create_session(args)
    try:
        logger.info(_("tabcmd.find.group").format(args.name))
        group_id = Server.find_group_id(logger, server, args.name)
        logger.info(_("deletegroup.status").format(group_id))
        server.groups.delete(group_id)
        logger.info(_("tabcmd.result.succeeded"))
    except TSC.ServerResponseError as e:
        # Fix: the message key was passed raw; wrap it in _() so it is
        # translated like every other user-facing message in this file.
        Errors.exit_with_error(logger, _("tabcmd.result.failed.delete.group"), e)
def run_command(args):
    """Queue re-encryption of all extracts on the target site."""
    logger = log(__class__.__name__, args.logging_level)
    logger.debug(_("tabcmd.launching"))
    session = Session()
    server = session.create_session(args)
    site_item = Server.get_site_for_command_or_throw(logger, server, args)
    try:
        logger.info(_("reencryptextracts.status").format(site_item.name))
        job = server.sites.encrypt_extracts(site_item.id)
    except TSC.ServerResponseError as e:
        # Fix: the exception was passed positionally into the message
        # parameter; pass it as the exception keyword, as sibling commands
        # (e.g. create-extracts) do.
        Errors.exit_with_error(logger, exception=e)
    logger.info(_("common.output.job_queued_success"))
    logger.debug("Extract re-encryption queued with JobID: {}".format(job.id))
def run_command(args):
    """Queue extract creation for a datasource or a workbook.

    Workbook extracts honor --encrypt, --include-all and the embedded
    datasource list; datasource extracts honor --encrypt only.
    """
    logger = log(__class__.__name__, args.logging_level)
    logger.debug(_("tabcmd.launching"))
    session = Session()
    server = session.create_session(args)
    # (removed unused local `creation_call = None` — it was never read)
    try:
        logger.debug(
            "Extract params: encrypt={}, include_all={}, datasources={}".
            format(args.encrypt, args.include_all, args.embedded_datasources))
        if args.datasource:
            data_source_item = Server.get_data_source_item(
                logger, server, args.datasource)
            logger.info(
                _("createextracts.for.datasource").format(args.datasource))
            job = server.datasources.create_extract(data_source_item,
                                                    encrypt=args.encrypt)
        elif args.workbook:
            workbook_item = Server.get_workbook_item(
                logger, server, args.workbook)
            logger.info(
                _("createextracts.for.workbook_name").format(args.workbook))
            job = server.workbooks.create_extract(
                workbook_item,
                encrypt=args.encrypt,
                includeAll=args.include_all,
                datasources=args.embedded_datasources,
            )
    except TSC.ServerResponseError as e:
        Errors.exit_with_error(logger, exception=e)
    logger.info(_("common.output.job_queued_success"))
    logger.debug("Extract creation queued with JobID: {}".format(job.id))
def run_command(args):
    """Create a project, optionally nested under an existing parent path.

    Returns the created ProjectItem, or None when the project already
    exists and --continue-if-exists was given.
    """
    logger = log(__class__.__name__, args.logging_level)
    logger.debug(_("tabcmd.launching"))
    session = Session()
    server = session.create_session(args)

    parent_id = None
    readable_name = args.project_name
    if args.parent_project_path:
        try:
            logger.info(
                _("tabcmd.find.parent_project").format(
                    args.parent_project_path))
            parent = Server.get_project_by_name_and_parent_path(
                logger, server, None, args.parent_project_path)
        except TSC.ServerResponseError as exc:
            Errors.exit_with_error(
                logger, _("publish.errors.server_resource_not_found"), exc)
        readable_name = "{0}/{1}".format(args.parent_project_path,
                                         args.project_name)
        parent_id = parent.id
        logger.debug("parent project = `{0}`, id = {1}".format(
            args.parent_project_path, parent_id))

    logger.info(_("createproject.status").format(readable_name))
    new_project = TSC.ProjectItem(args.project_name, args.description, None,
                                  parent_id)
    try:
        project_item = server.projects.create(new_project)
    except TSC.ServerResponseError as e:
        # A name clash is tolerated only when --continue-if-exists was given.
        if Errors.is_resource_conflict(e):
            exists_message = _("tabcmd.result.already_exists").format(
                args.project_name)
            if args.continue_if_exists:
                logger.info(exists_message)
                return
            Errors.exit_with_error(logger, exists_message)
        Errors.exit_with_error(
            logger, _("publish.errors.unexpected_server_response"), e)
    else:
        logger.info(_("common.output.succeeded"))
        return project_item
def run_command(args):
    """Apply the requested attribute changes to the target site."""
    logger = log(__class__.__name__, args.logging_level)
    logger.debug(_("tabcmd.launching"))
    session = Session()
    server = session.create_session(args)
    site_item = Server.get_site_for_command_or_throw(logger, server, args)

    # Only overwrite fields the caller actually supplied (truthy values).
    requested_changes = [
        ("content_url", args.url),
        ("user_quota", args.user_quota),
        ("storage_quota", args.storage_quota),
        ("state", args.status),
    ]
    for attribute, value in requested_changes:
        if value:
            setattr(site_item, attribute, value)

    try:
        logger.info(_("editsite.status").format(site_item.name))
        server.sites.update(site_item)
        logger.info(_("common.output.succeeded"))
    except TSC.ServerResponseError as e:
        Errors.exit_with_error(
            logger, _("publish.errors.unexpected_server_response"), e)
def run_command(args):
    """Publish a workbook (twb/twbx) or datasource (tds/tdsx/hyper) file.

    Resolves the destination project (defaulting to the Default project),
    determines the publish mode and file type, then publishes.
    """
    logger = log(__class__.__name__, args.logging_level)
    logger.debug(_("tabcmd.launching"))
    session = Session()
    server = session.create_session(args)
    if args.project_name:
        try:
            project_id = Server.get_project_by_name_and_parent_path(
                logger, server, args.project_name, args.parent_project_path)
        except Exception as exc:
            Errors.exit_with_error(
                logger, _("publish.errors.server_resource_not_found"), exc)
    else:
        project_id = ""
        args.project_name = "default"
        args.parent_project_path = ""
    publish_mode = PublishCommand.get_publish_mode(args)
    source = PublishCommand.get_filename_extension_if_tableau_type(
        logger, args.filename)
    logger.info(_("publish.status").format(args.filename))
    if source in ["twbx", "twb"]:
        new_workbook = TSC.WorkbookItem(project_id, name=args.name,
                                        show_tabs=args.tabbed)
        try:
            new_workbook = server.workbooks.publish(new_workbook,
                                                    args.filename,
                                                    publish_mode)
        except IOError as ioe:
            Errors.exit_with_error(logger, ioe)
        logger.info(
            _("publish.success") + "\n{}".format(new_workbook.webpage_url))
    elif source in ["tds", "tdsx", "hyper"]:
        new_datasource = TSC.DatasourceItem(project_id, name=args.name)
        try:
            new_datasource = server.datasources.publish(new_datasource,
                                                        args.filename,
                                                        publish_mode)
        except IOError as ioe:
            # Fix: this handler referenced `exc`, which is not in scope here
            # (it only exists in the project-lookup branch above), so a
            # publish failure raised NameError instead of reporting the real
            # IOError. Pass the caught exception instead.
            Errors.exit_with_error(logger, ioe)
        logger.info(
            _("publish.success") + "\n{}".format(new_datasource.webpage_url))
def test_get_project(self, mock_server):
    """get_project_by_name_and_parent_path must call through to projects.get."""
    mock_server.projects.get = getter
    fake_logger = mock.MagicMock()
    Server.get_project_by_name_and_parent_path(fake_logger, mock_server,
                                               "random_name", "")
    getter.assert_called()
def test_get_project(self):
    """E2E smoke test: look up the Default project on a live session."""
    logger = log(__class__.__name__, "info")
    live_server = E2EServerTests.test_log_in()
    Server.get_project_by_name_and_parent_path(logger, live_server, "Default",
                                               None)
def test_evaluate_file_name_url_no_ext_fails(self):
    """A path whose final segment has no extension must raise ValueError."""
    extensionless_path = "filename/filename/filename/file"
    with self.assertRaises(ValueError):
        Server.get_filename_extension_if_tableau_type(mock_logger,
                                                      extensionless_path)
def test_evaluate_file_path(self):
    """A relative path still resolves to its Tableau extension."""
    ext = Server.get_filename_extension_if_tableau_type(
        mock_logger, "../assets/filename.twb")
    assert ext == "twb", ext
def test_evaluate_file_name_url(self):
    """A bare .twbx filename resolves to the twbx extension."""
    ext = Server.get_filename_extension_if_tableau_type(
        mock_logger, "filename.twbx")
    assert ext == "twbx", ext
def run_command(args):
    """Queue (and optionally wait for) an extract refresh.

    Refreshes the extract of a datasource or workbook; with --synchronous,
    polls the job until it starts and then until it finishes.
    """
    logger = log(__class__.__name__, args.logging_level)
    logger.debug(_("tabcmd.launching"))
    session = Session()
    server = session.create_session(args)
    if args.addcalculations or args.removecalculations:
        logger.warning(
            "Data Acceleration tasks are deprecated and this parameter has no effect."
            "It will be removed in a future update.")
    # are these two mandatory? mutually exclusive?
    # docs: the REST method always runs a full refresh even if the refresh type is set to incremental.
    if args.incremental:
        # docs: run the incremental refresh
        # Fix: logger.warn is a deprecated alias; use logger.warning.
        logger.warning(
            "Incremental refresh is not yet available through the new tabcmd"
        )
    # if args.synchronous:
    # docs: run a full refresh and poll until it completes
    # else: run a full refresh but don't poll for completion
    container = None
    if args.project_name:
        try:
            container = Server.get_project_by_name_and_parent_path(
                logger, server, args.project_name, args.parent_project_path)
        except Exception as ex:
            logger.warning(
                "Could not find project {}/{}. \nContinuing without.".format(
                    args.parent_project_path, args.project_name))
    job = None
    try:
        # TODO: use the container in the search
        if args.datasource:
            logger.debug(_("export.status").format(args.datasource))
            datasource_id = Server.get_data_source_id(
                logger, server, args.datasource, container)
            logger.info(
                _("refreshextracts.status_refreshed").format(
                    _("content_type.datasource"), args.datasource))
            job: TSC.JobItem = server.datasources.refresh(datasource_id)
        elif args.workbook:
            logger.debug(_("export.status").format(args.workbook))
            workbook_id = Server.get_workbook_id(logger, server,
                                                 args.workbook, container)
            logger.info(
                _("refreshextracts.status_refreshed").format(
                    _("content_type.workbook"), args.workbook))
            job: TSC.JobItem = server.workbooks.refresh(workbook_id)
        elif args.url:
            logger.error("URL not yet implemented")
    except TSC.ServerResponseError as e:
        Errors.exit_with_error(logger, _("refreshextracts.errors.error"), e)
    logger.info(_("common.output.job_queued_success"))
    if args.synchronous:
        # maintains a live connection to the server while the refresh operation
        # is underway, polling every second until the background job is done.
        # <job id="JOB_ID" mode="MODE" type="RefreshExtract" />
        logger.info("Waiting for refresh job to begin ....")

        def _job_started():
            # Log a heartbeat, then report the real condition. The previous
            # lambda returned `logger.info(".") and <cond>`; logger.info()
            # returns None, so that expression was always falsy and polling
            # could never succeed — every synchronous run timed out.
            logger.info(".")
            return job.started_at is not None

        def _job_finished():
            logger.info("{}".format(job.progress))
            return job.finish_code != -1

        try:
            polling2.poll(_job_started, step=1, timeout=args.timeout)
        except polling2.TimeoutException:
            Errors.exit_with_error(logger, _("messages.timeout_error.summary"))
        logger.info("Job started at {}".format(job.started_at))
        try:
            polling2.poll(_job_finished, step=1, timeout=args.timeout)
            logger.info("Job completed at {}".format(job.completed_at))
        except polling2.TimeoutException:
            Errors.exit_with_error(logger, _("messages.timeout_error.summary"))
    else:
        logger.info(_("common.output.job_queued_success"))
        # Guard: `job` stays None on the (unimplemented) --url path; avoid an
        # AttributeError when logging the job id.
        if job is not None:
            logger.debug("Extract refresh started with JobID: {0}".format(
                job.id))