def main():
    """Lint a metrics package by parsing its metric and specification sets.

    Reads the package directory from the command line (falling back to the
    installed ``verify_metrics`` package when available), then loads the
    ``metrics/`` and ``specs/`` trees, reporting counts of each.
    """
    # Fall back to the installed verify_metrics package when it can be found.
    try:
        default_metrics_package_dir = getPackageDir('verify_metrics')
    except lsst.pex.exceptions.NotFoundError:
        default_metrics_package_dir = None

    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument(
        "package_dir",
        default=default_metrics_package_dir,
        type=str,
        nargs='?',
        help="Filepath of the metrics package to be checked.")
    args = parser.parse_args()

    # Robustness: without this guard, a missing positional argument combined
    # with an unlocatable verify_metrics package would pass None straight
    # into load_metrics_package and crash with an obscure error.
    if args.package_dir is None:
        parser.error("no package_dir given and the verify_metrics package "
                     "could not be found")

    print('Linting {}.'.format(args.package_dir))

    metric_repo = MetricSet.load_metrics_package(args.package_dir)
    print('Passed: metrics/')
    print('\tParsed {0:d} metric sets.'.format(len(metric_repo)))

    spec_set = SpecificationSet.load_metrics_package(args.package_dir)
    print('Passed: specs/')
    print('\tParsed {0:d} specifications.'.format(len(spec_set)))

    print("\nAll tests passed.")
def ingest_data(filenames, metrics_package):
    """Load JSON files into a dict of lsst.validate.base measurement Jobs.

    Parameters
    ----------
    filenames : list of str
        Filenames of JSON files to load.
    metrics_package : str
        Name or path of the metrics package whose metric and specification
        definitions are attached to each job.

    Returns
    -------
    jobs : dict of str to lsst.validate.base.Job
        Mapping from filter name (read from each job's metadata) to the Job
        representation of the corresponding JSON file.
    """
    # The metric and specification sets depend only on the metrics package,
    # so load them once instead of re-reading them for every input file.
    metrics = MetricSet.load_metrics_package(metrics_package)
    specs = SpecificationSet.load_metrics_package(metrics_package)

    jobs = {}
    # Read in JSON output from metrics run
    for filename in filenames:
        with open(filename) as fh:
            data = json.load(fh)
        job = Job.deserialize(**data)
        filter_name = job.meta['filter_name']
        job.metrics.update(metrics)
        job.specs.update(specs)
        jobs[filter_name] = job
    return jobs
def __init__(self, repository, collection, metrics_package, spec, dataset_name):
    # NOTE: the packager is hard coded to verify_metrics for now; promote it
    # to a constructor argument if that ever becomes necessary.
    self.metrics = MetricSet.load_metrics_package(
        package_name_or_path='verify_metrics', subset=metrics_package)
    self.spec = spec
    self.collection = collection
    self.dataset_name = dataset_name
    # The registry is derived from the butler, so build the butler first.
    self.butler = Butler(repository)
    self.registry = self.butler.registry
def setUp(self):
    """Load metric definitions from the YAML files under data/."""
    data_dir = os.path.join(os.path.dirname(__file__), 'data')
    self.metrics_yaml_dirname = data_dir
    self.metric_set = MetricSet.load_metrics_package(data_dir)
    # Pre-built measurements against the test metrics for use in the tests.
    self.pa1_meas = Measurement(self.metric_set['testing.PA1'], 4. * u.mmag)
    self.am1_meas = Measurement(self.metric_set['testing.AM1'],
                                2. * u.marcsec)
    self.pa2_meas = Measurement(self.metric_set['testing.PA2'], 10. * u.mmag)
def main():
    """Main entrypoint for the ``lint_metrics.py`` script.

    Parses both the metric and specification sets of the target package and
    reports how many of each were read successfully.
    """
    args = build_argparser().parse_args()
    print('Linting {}.'.format(args.package_dir))

    metrics = MetricSet.load_metrics_package(args.package_dir)
    print('Passed: metrics/')
    print('\tParsed {0:d} metric sets.'.format(len(metrics)))

    specifications = SpecificationSet.load_metrics_package(args.package_dir)
    print('Passed: specs/')
    print('\tParsed {0:d} specifications.'.format(len(specifications)))

    print("\nAll tests passed.")
def load_json_output(filepath, metrics_package='verify_metrics'):
    """Read JSON from a file into a job object.

    This is a trivial de-serialization: no checking is done that the result
    is a valid ``validate.base`` job object.

    Parameters
    ----------
    filepath : `str`
        Source file name for JSON output.
    metrics_package : `str`, optional
        Metrics package whose metric and specification definitions are
        attached to the job.  Defaults to ``'verify_metrics'``.

    Returns
    -------
    job : A `validate.base.job` object.
    """
    with open(filepath, 'r') as infile:
        payload = json.load(infile)
    job = Job.deserialize(**payload)
    job.metrics.update(MetricSet.load_metrics_package(metrics_package))
    job.specs.update(SpecificationSet.load_metrics_package(metrics_package))
    return job
def setUp(self):
    """Point the tests at the metric definitions shipped under data/."""
    test_dir = os.path.dirname(__file__)
    self.metrics_yaml_dirname = os.path.join(test_dir, 'data')
    self.metric_set = MetricSet.load_metrics_package(
        self.metrics_yaml_dirname)
def test_nonexistent_package(self):
    """Loading a metrics package that does not exist must raise OSError."""
    self.assertRaises(OSError, MetricSet.load_metrics_package,
                      'nonexistent_metrics')
def setUp(self):
    """Load the installed verify_metrics package for each test."""
    package = 'verify_metrics'
    self.metric_set = MetricSet.load_metrics_package(package)