def test_dcos_diagnostics_bundle_create_download_delete(
    dcos_api_session: DcosApiSession,
    use_legacy_api: bool,
) -> None:
    """test bundle create, read, delete workflow"""
    health_url = dcos_api_session.default_url.copy(
        query='cache=0',
        path='system/health/v1',
    )

    diagnostics = Diagnostics(
        default_url=health_url,
        masters=dcos_api_session.masters,
        all_slaves=dcos_api_session.all_slaves,
        session=dcos_api_session.copy().session,
        use_legacy_api=use_legacy_api,
    )

    app, test_uuid = test_helpers.marathon_test_docker_app('diag-bundle', constraints=[])
    with dcos_api_session.marathon.deploy_and_cleanup(app, timeout=120):
        bundle = _create_bundle(diagnostics)
        _check_diagnostics_bundle_status(dcos_api_session)
        _download_and_extract_bundle(dcos_api_session, bundle, diagnostics)
        _download_and_extract_bundle_from_another_master(dcos_api_session, bundle, diagnostics)
        _delete_bundle(diagnostics, bundle)
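
# NOTE: Hedged sketch, not part of the original module. The test above calls
# helpers such as _create_bundle, which are defined elsewhere in this file and
# are not shown in this excerpt. The function below is a hypothetical
# illustration of how such a helper might drive the Diagnostics class,
# assuming it exposes the same start/wait/get methods used by the
# _dump_diagnostics fixture below; the name, return type, and the
# "exactly one bundle" assertion are assumptions, not the real implementation.
def _example_create_bundle(diagnostics: Diagnostics) -> str:
    """Start a diagnostics job, wait for it to finish, and return the bundle name."""
    # Progress marker consumed by wait_for_diagnostics_job, as in _dump_diagnostics below.
    last_datapoint = {'time': None, 'value': 0}
    diagnostics.start_diagnostics_job()
    diagnostics.wait_for_diagnostics_job(last_datapoint=last_datapoint)
    diagnostics.wait_for_diagnostics_reports()
    bundles = diagnostics.get_diagnostics_reports()
    assert len(bundles) == 1, 'expected exactly one diagnostics bundle, got {}'.format(bundles)
    return bundles[0]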
def _dump_diagnostics(request: requests.Request, dcos_api_session: DcosApiSession) -> Generator:
    """Download the zipped diagnostics bundle report from each master in the
    cluster to the home directory. This should be run last. The _ prefix
    makes sure that pytest calls this first out of the autouse session scope
    fixtures, which means that its post-yield code will be executed last.

    * There is no official way to ensure fixtures are called in a certain order
      https://github.com/pytest-dev/pytest/issues/1216
    * However it seems that fixtures at the same scope are called alphabetically
      https://stackoverflow.com/a/28593102/1436300
    """
    yield

    make_diagnostics_report = os.environ.get('DIAGNOSTICS_DIRECTORY') is not None
    if make_diagnostics_report:
        creation_start = datetime.datetime.now()
        last_datapoint = {
            'time': None,
            'value': 0,
        }

        health_url = dcos_api_session.default_url.copy(
            query='cache=0',
            path='system/health/v1',
        )

        diagnostics = Diagnostics(
            default_url=health_url,
            masters=dcos_api_session.masters,
            all_slaves=dcos_api_session.all_slaves,
            session=dcos_api_session.copy().session,
        )

        log.info('Create diagnostics report for all nodes')
        diagnostics.start_diagnostics_job()

        log.info('\nWait for diagnostics job to complete')
        diagnostics.wait_for_diagnostics_job(last_datapoint=last_datapoint)
        duration = last_datapoint['time'] - creation_start  # type: ignore
        log.info('\nDiagnostics bundle took {} to generate'.format(duration))

        log.info('\nWait for diagnostics report to become available')
        diagnostics.wait_for_diagnostics_reports()

        log.info('\nDownload zipped diagnostics reports')
        bundles = diagnostics.get_diagnostics_reports()
        diagnostics.download_diagnostics_reports(diagnostics_bundles=bundles)
    else:
        log.info('\nNot downloading diagnostics bundle for this session.')
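
# NOTE: Hedged usage sketch, not part of the original module. The fixture
# above only produces a report when DIAGNOSTICS_DIRECTORY is set in the
# environment, e.g. (assumed invocation, path is illustrative):
#
#   DIAGNOSTICS_DIRECTORY=/tmp/diagnostics pytest <test selection>
#
# The leading-underscore naming trick described in the docstring relies on
# pytest appearing to instantiate same-scope autouse fixtures alphabetically
# and tearing them down in reverse order of setup. A minimal sketch of that
# assumption, kept as comments so no extra autouse fixtures are registered
# in this module:
#
#   @pytest.fixture(autouse=True, scope='session')
#   def _aaa_first() -> Generator:
#       yield                 # set up first (sorts first) ...
#       log.info('runs last')  # ... so this post-yield code runs last
#
#   @pytest.fixture(autouse=True, scope='session')
#   def zzz_last() -> Generator:
#       yield
#       log.info('runs first')  # torn down before _aaa_first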