Example #1
def _load_details(provider_name, run_id, test_name):
    if (provider_name, run_id) not in _cached_details:
        providers = get_providers()
        if not providers:
            raise NoRunDataException("No test providers could be loaded")

        if provider_name not in providers:
            raise ProviderNotFoundException("Requested subunit provider could "
                                            "not be found: " + provider_name)

        provider = providers[provider_name]
        try:
            stream = provider.get_stream(run_id)
            converted_run = convert_stream(stream, strip_details=False)

            # remap dict to allow direct access to details via test name
            dest = {}
            for entry in converted_run:
                dest[entry['name']] = entry['details']

            _cached_details[provider_name, run_id] = dest
        except (KeyError, IndexError):
            raise RunNotFoundException("Requested test run could not be found")

    details_map = _cached_details[provider_name, run_id]
    if test_name is None:
        return details_map
    else:
        if test_name in details_map:
            return details_map[test_name]
        else:
            raise TestNotFoundException(
                "Requested test could not be found in run")
Example #2
def _load_run(provider_name, run_id):
    if (provider_name, run_id) in _cached_run:
        return _cached_run[provider_name, run_id]

    providers = get_providers()
    if not providers:
        raise NoRunDataException("No test providers could be loaded")

    if provider_name not in providers:
        raise ProviderNotFoundException("Requested subunit provider could not "
                                        "be found")

    p = providers[provider_name]

    try:
        # assume first repo for now
        stream = p.get_stream(run_id)

        # strip details for now
        # TODO(provide method for getting details on demand)
        # (preferably for individual tests to avoid bloat)
        converted_run = convert_stream(stream, strip_details=True)
        _cached_run[provider_name, run_id] = converted_run

        return converted_run
    except KeyError:
        raise RunNotFoundException("Requested test run could not be found")
Example #3
def main():
    parser = ArgumentParser(description="Generates JSON data files for a "
                                        "StackViz site.")
    parser.add_argument("path",
                        help="The output directory. Will be created if it "
                             "doesn't already exist.")
    parser.add_argument("-z", "--gzip",
                        help="Enable gzip compression for data files.",
                        action="store_true")
    parser.add_argument("-f", "--stream-file",
                        action="append",
                        help="Include the given direct subunit stream; can be "
                             "used multiple times.")
    parser.add_argument("-r", "--repository",
                        action="append",
                        help="A directory containing a `.testrepository` to "
                             "include; can be used multiple times.")
    parser.add_argument("-i", "--stdin",
                        help="Read a direct subunit stream from standard "
                             "input.",
                        action="store_true")
    parser.add_argument("--dstat",
                        help="The path to the DStat log file (CSV-formatted) "
                             "to include.")

    args = parser.parse_args()

    if not os.path.exists(args.path):
        os.mkdir(args.path)

    dstat = None
    if args.dstat:
        print("Exporting DStat log")
        dstat = export_dstat(args.dstat, args.path, args.gzip)

    providers = tempest_subunit.get_providers(
        args.repository,
        args.stream_file,
        args.stdin)

    tempest_config_entries = []

    for provider in providers.values():
        print("Exporting Tempest provider: %s (%d)" % (provider.description,
                                                       provider.count))
        tempest_config_entries.extend(
            export_tempest(provider, args.path, dstat, args.gzip)
        )

    with open(os.path.join(args.path, 'config.json'), 'w') as f:
        json.dump({
            'tempest': tempest_config_entries
        }, f)
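A minimal, self-contained sketch of how the append-style options in Examples #3 and #5 accumulate values; it reproduces only the argument definitions (help strings omitted), and the sample invocation is purely illustrative:

from argparse import ArgumentParser

# Reduced copy of the parser from Example #3.
parser = ArgumentParser(description="Generates JSON data files for a "
                                    "StackViz site.")
parser.add_argument("path")
parser.add_argument("-z", "--gzip", action="store_true")
parser.add_argument("-f", "--stream-file", action="append")
parser.add_argument("-r", "--repository", action="append")
parser.add_argument("-i", "--stdin", action="store_true")
parser.add_argument("--dstat")

# action="append" collects repeated flags into a list; unused flags stay None.
args = parser.parse_args(["out", "-z", "-f", "run1.subunit", "-f", "run2.subunit"])
assert args.gzip is True
assert args.stream_file == ["run1.subunit", "run2.subunit"]
assert args.repository is None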
Example #4
def test_export_file(self):
    tmp_fixture = self.useFixture(fixtures.TempDir())
    output_dir = tmp_fixture.path
    subunit_path = os.path.join(os.path.dirname(__file__),
                                'fixtures', 'tempest.subunit')
    providers = tempest_subunit.get_providers(None, [subunit_path], None)
    export.export_tempest(list(providers.values())[0], output_dir, False)
    output_file = os.path.join(output_dir,
                               'tempest.subunit-0-details.json')
    j = json.load(open(output_file))
    assert "tempest.api.compute.admin" \
           ".test_agents.AgentsAdminTestJSON.test_create_agent" in j
Example #5
def main():
    parser = ArgumentParser(description="Generates JSON data files for a "
                            "StackViz site.")
    parser.add_argument("path",
                        help="The output directory. Will be created if it "
                        "doesn't already exist.")
    parser.add_argument("-z",
                        "--gzip",
                        help="Enable gzip compression for data files.",
                        action="store_true")
    parser.add_argument("-f",
                        "--stream-file",
                        action="append",
                        help="Include the given direct subunit stream; can be "
                        "used multiple times.")
    parser.add_argument("-r",
                        "--repository",
                        action="append",
                        help="A directory containing a `.testrepository` to "
                        "include; can be used multiple times.")
    parser.add_argument("-i",
                        "--stdin",
                        help="Read a direct subunit stream from standard "
                        "input.",
                        action="store_true")
    parser.add_argument("--dstat",
                        help="The path to the DStat log file (CSV-formatted) "
                        "to include.")

    args = parser.parse_args()

    if not os.path.exists(args.path):
        os.mkdir(args.path)

    dstat = None
    if args.dstat:
        print("Exporting DStat log")
        dstat = export_dstat(args.dstat, args.path, args.gzip)

    providers = tempest_subunit.get_providers(args.repository,
                                              args.stream_file, args.stdin)

    tempest_config_entries = []

    for provider in providers.values():
        print("Exporting Tempest provider: %s (%d)" %
              (provider.description, provider.count))
        tempest_config_entries.extend(
            export_tempest(provider, args.path, dstat, args.gzip))

    with open(os.path.join(args.path, 'config.json'), 'w') as f:
        json.dump({'tempest': tempest_config_entries}, f)
Example #6
def main():
    parser = ArgumentParser(description="Generates JSON data files for a "
                            "StackViz site.")
    parser.add_argument("path",
                        help="The output directory. Will be created if it "
                        "doesn't already exist.")
    parser.add_argument("-z",
                        "--gzip",
                        help="Enable gzip compression for data files.",
                        action="store_true")
    parser.add_argument("-e",
                        "--env",
                        help="Include Zuul metadata from environment "
                        "variables.",
                        action="store_true")
    parser.add_argument("-f",
                        "--stream-file",
                        action="append",
                        help="Include the given direct subunit stream; can be "
                        "used multiple times.")
    parser.add_argument("-r",
                        "--repository",
                        help="A directory containing a `.testrepository` to "
                        "include; can be used multiple times.")
    parser.add_argument("-i",
                        "--stdin",
                        help="Read a direct subunit stream from standard "
                        "input.",
                        action="store_true")
    parser.add_argument("--dstat",
                        help="The path to the DStat log file (CSV-formatted) "
                        "to include.")

    args = parser.parse_args()

    if not os.path.exists(args.path):
        os.mkdir(args.path)

    artifacts = []
    dataset = {
        'name': None,
        'url': None,
        'status': None,
        'ci_username': None,
        'pipeline': None,
        'change_id': None,
        'revision': None,
        'change_project': None,
        'change_subject': None,
        'artifacts': artifacts
    }

    if args.env:
        dataset.update(environment_params())

    if args.dstat:
        print("Exporting DStat log")
        dstat = export_dstat(args.dstat, args.path, args.gzip)
        artifacts.append(dstat)

    providers = tempest_subunit.get_providers(args.repository,
                                              args.stream_file, args.stdin)

    for provider in providers.values():
        print("Exporting Tempest provider: %s (%d)" %
              (provider.description, provider.count))
        artifacts.extend(export_tempest(provider, args.path, args.gzip))

    with open(os.path.join(args.path, 'config.json'), 'w') as f:
        json.dump({
            'deployer': False,
            'datasets': [dataset]
        },
                  f,
                  default=json_date_handler)
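Examples #6 and #7 pass default=json_date_handler to json.dump, but the handler itself is not shown in these snippets. A typical implementation of such a handler looks like the sketch below; this is an assumption for illustration, not the original StackViz code:

import datetime

def json_date_handler(obj):
    # json.dump calls the default= hook for objects it cannot serialize
    # natively; datetime values are emitted as ISO 8601 strings here.
    if isinstance(obj, (datetime.datetime, datetime.date)):
        return obj.isoformat()
    raise TypeError("Object of type %s is not JSON serializable"
                    % type(obj).__name__)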
Example #7
def main():
    parser = ArgumentParser(description="Generates JSON data files for a "
                                        "StackViz site.")
    parser.add_argument("path",
                        help="The output directory. Will be created if it "
                             "doesn't already exist.")
    parser.add_argument("-z", "--gzip",
                        help="Enable gzip compression for data files.",
                        action="store_true")
    parser.add_argument("-e", "--env",
                        help="Include Zuul metadata from environment "
                             "variables.",
                        action="store_true")
    parser.add_argument("-f", "--stream-file",
                        action="append",
                        help="Include the given direct subunit stream; can be "
                             "used multiple times.")
    parser.add_argument("-r", "--repository",
                        help="A directory containing a `.testrepository` to "
                             "include; can be used multiple times.")
    parser.add_argument("-i", "--stdin",
                        help="Read a direct subunit stream from standard "
                             "input.",
                        action="store_true")
    parser.add_argument("--dstat",
                        help="The path to the DStat log file (CSV-formatted) "
                             "to include.")

    args = parser.parse_args()

    if not os.path.exists(args.path):
        os.mkdir(args.path)

    artifacts = []
    dataset = {
        'name': None,
        'url': None,
        'status': None,
        'ci_username': None,
        'pipeline': None,
        'change_id': None,
        'revision': None,
        'change_project': None,
        'change_subject': None,
        'artifacts': artifacts
    }

    if args.env:
        dataset.update(environment_params())

    if args.dstat:
        print("Exporting DStat log")
        dstat = export_dstat(args.dstat, args.path, args.gzip)
        artifacts.append(dstat)

    providers = tempest_subunit.get_providers(
        args.repository,
        args.stream_file,
        args.stdin)

    for provider in providers.values():
        print("Exporting Tempest provider: %s (%d)" % (provider.description,
                                                       provider.count))
        artifacts.extend(export_tempest(provider, args.path, args.gzip))

    with open(os.path.join(args.path, 'config.json'), 'w') as f:
        json.dump({
            'deployer': False,
            'datasets': [dataset]
        }, f, default=json_date_handler)
Example #8
def main():
    parser = ArgumentParser(description="Generates a self-contained, static "
                                        "StackViz site at the given path.")
    parser.add_argument("path",
                        help="The output directory. Will be created if it "
                             "doesn't already exist.")
    parser.add_argument("--ignore-bower",
                        help="Ignore missing Bower components.",
                        action="store_true")
    parser.add_argument("-z", "--gzip",
                        help="Enable gzip compression for data files.",
                        action="store_true")
    parser.add_argument("-f", "--stream-file",
                        action="append",
                        help="Include the given direct subunit stream.")
    parser.add_argument("-r", "--repository",
                        action="append",
                        help="A directory containing a `.testrepository` to "
                             "include. If not provided, the `settings.py` "
                             "configured values will be used.")
    parser.add_argument("-i", "--stdin",
                        help="Read a direct subunit stream from standard "
                             "input.",
                        action="store_true")
    parser.add_argument("--dstat",
                        help="The path to the DStat log file (CSV-formatted) "
                             "to include. If not provided, the `settings.py` "
                             "configured value will be used.")

    args = parser.parse_args()

    if not args.ignore_bower:
        if not os.listdir(os.path.join(_base, 'static', 'components')):
            print("Bower components have not been installed, please run "
                  "`bower install`")
            return 1

    if os.path.exists(args.path):
        if os.listdir(args.path):
            print("Destination exists and is not empty, cannot continue")
            return 1
    else:
        os.mkdir(args.path)

    init_django(args)

    print("Copying static files ...")
    shutil.copytree(os.path.join(_base, 'static'),
                    os.path.join(args.path, 'static'))

    for path in EXPORT_PATHS:
        print("Rendering:", path)
        export_single_page(path, args.path)

    for provider in tempest_subunit.get_providers().values():
        for i in range(provider.count):
            param = (provider.name, i)

            print("Rendering views for tempest run %s #%d" % param)
            export_single_page('/tempest_timeline_%s_%d.html' % param,
                               args.path)
            export_single_page('/tempest_results_%s_%d.html' % param,
                               args.path)

            print("Exporting data for tempest run %s #%d" % param)
            export_single_page('/tempest_api_tree_%s_%d.json' % param,
                               args.path, args.gzip)
            export_single_page('/tempest_api_raw_%s_%d.json' % param,
                               args.path, args.gzip)
            export_single_page('/tempest_api_details_%s_%d.json' % param,
                               args.path, args.gzip)

    print("Exporting DStat log: dstat_log.csv")
    export_single_page('/dstat_log.csv', args.path, args.gzip)
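The per-run page and data file names in Example #8 are built from a (provider_name, run_index) tuple via %-formatting. A standalone sketch with placeholder values:

# Placeholder values; real names come from tempest_subunit.get_providers().
param = ('tempest', 0)

pages = ['/tempest_timeline_%s_%d.html' % param,
         '/tempest_results_%s_%d.html' % param]
data_files = ['/tempest_api_tree_%s_%d.json' % param,
              '/tempest_api_raw_%s_%d.json' % param,
              '/tempest_api_details_%s_%d.json' % param]

assert pages[0] == '/tempest_timeline_tempest_0.html'
assert data_files[-1] == '/tempest_api_details_tempest_0.json'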