def main():
    """Lint a metrics package: parse its metrics and specs, reporting results."""
    # Locate the default verify_metrics package; fall back to None when it
    # is not set up in the environment.
    try:
        default_metrics_package_dir = getPackageDir('verify_metrics')
    except lsst.pex.exceptions.NotFoundError:
        default_metrics_package_dir = None

    arg_parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    arg_parser.add_argument(
        "package_dir",
        default=default_metrics_package_dir,
        type=str,
        nargs='?',
        help="Filepath of the metrics package to be checked.")
    args = arg_parser.parse_args()

    print('Linting {}.'.format(args.package_dir))

    # Parsing the package is the lint check itself: any format error raises.
    metrics = MetricSet.load_metrics_package(args.package_dir)
    print('Passed: metrics/')
    print('\tParsed {0:d} metric sets.'.format(len(metrics)))

    specifications = SpecificationSet.load_metrics_package(args.package_dir)
    print('Passed: specs/')
    print('\tParsed {0:d} specifications.'.format(len(specifications)))

    print("\nAll tests passed.")
def ingest_data(filenames, metrics_package):
    """Load JSON job files and attach metric/specification definitions.

    Parameters
    ----------
    filenames : list of str
        Filenames of JSON files to load.
    metrics_package : str
        Name of the metrics package whose metric and specification
        definitions are merged into each job.

    Returns
    -------
    jobs : dict
        Mapping from each job's ``filter_name`` metadata value to the
        Job deserialized from the corresponding JSON file.  Note: jobs
        sharing a ``filter_name`` overwrite each other; only the last
        one read is kept.
    """
    # The metric/spec definitions depend only on the package name, so load
    # them once instead of re-reading the package for every input file.
    # NOTE(review): this shares one MetricSet/SpecificationSet instance
    # across jobs; assumed safe since definitions are read-only — confirm.
    metrics = MetricSet.load_metrics_package(metrics_package)
    specs = SpecificationSet.load_metrics_package(metrics_package)

    jobs = {}
    # Read in JSON output from metrics run
    for filename in filenames:
        with open(filename) as fh:
            data = json.load(fh)
        job = Job.deserialize(**data)
        filter_name = job.meta['filter_name']
        job.metrics.update(metrics)
        job.specs.update(specs)
        jobs[filter_name] = job
    return jobs
def main():
    """Main entrypoint for the ``lint_metrics.py`` script.
    """
    args = build_argparser().parse_args()
    package_dir = args.package_dir

    print('Linting {}.'.format(package_dir))

    # Successfully parsing the package's metrics/ and specs/ trees is the
    # lint check; any format problem raises before the "Passed" message.
    metrics = MetricSet.load_metrics_package(package_dir)
    print('Passed: metrics/')
    print('\tParsed {0:d} metric sets.'.format(len(metrics)))

    specifications = SpecificationSet.load_metrics_package(package_dir)
    print('Passed: specs/')
    print('\tParsed {0:d} specifications.'.format(len(specifications)))

    print("\nAll tests passed.")
def run(validation_drp_report_filenames, output_file, srd_level=None,
        release_specs_package=None, release_level=None,
        metrics_package='verify_metrics'):
    """Build and write an RST performance-metric report from DRP JSON output.

    Parameters
    ----------
    validation_drp_report_filenames : list of str
        Filepaths for JSON files.
    output_file : str
        Filepath of output RST file.
    srd_level : str, optional
        SRD level to quote. One of ['design', 'minimum', 'stretch'].
    release_specs_package : str, optional
        Name of package to use in constructing the release level specs.
    release_level : str, optional
        A specification level in the release specs package,
        e.g., 'FY17' or 'ORR'.
    metrics_package : str, optional
        Metrics package used to interpret the input jobs
        (default: 'verify_metrics').

    Notes
    -----
    Writes a table of performance metrics to an RST file.
    """
    input_objects = ingest_data(validation_drp_report_filenames,
                                metrics_package)
    input_table = objects_to_table(input_objects, level=srd_level)
    if input_table is None:
        msg = "Table from Job is None. Returning without writing table"
        print(msg)
        return

    # Release-spec comparison column only makes sense when both the package
    # and the level within it were supplied.
    if release_specs_package is not None and release_level is not None:
        tmp_specs = SpecificationSet.load_metrics_package(
            release_specs_package, subset='validate_drp')
        release_specs = tmp_specs.subset(spec_tags=['release'])
        add_release_spec(input_table, release_specs, release_level, srd_level)

    write_report(input_table, output_file)
def load_json_output(filepath, metrics_package='verify_metrics'):
    """Read JSON from a file into a job object.

    Currently just does a trivial de-serialization with no checking
    that the result is a valid job object.

    Parameters
    ----------
    filepath : `str`
        Source file name for JSON output.
    metrics_package : `str`, optional
        Metrics package whose metric and specification definitions are
        merged into the returned job (default: 'verify_metrics').

    Returns
    -------
    job :
        The deserialized job object with the metrics and specs from
        ``metrics_package`` merged in.
    """
    with open(filepath, 'r') as infile:
        json_data = json.load(infile)
    job = Job.deserialize(**json_data)
    # Attach the package's metric and specification definitions so the job
    # can be evaluated against them.
    metrics = MetricSet.load_metrics_package(metrics_package)
    job.metrics.update(metrics)
    specs = SpecificationSet.load_metrics_package(metrics_package)
    job.specs.update(specs)
    return job