Example #1
class MetricSetSubsetTestCase(unittest.TestCase):
    """Test case for MetricSet.subset."""
    def setUp(self):
        self.m1 = Metric('pkgA.m1', 'In pkgA', '', tags=['testing'])
        self.m2 = Metric('pkgA.m2', 'In pkgA', '', tags=['other'])
        self.m3 = Metric('pkgB.m3', 'In pkgB', '', tags=['testing'])
        self.metric_set = MetricSet([self.m1, self.m2, self.m3])

    def test_subset_A(self):
        subset = self.metric_set.subset('pkgA')
        self.assertEqual(len(subset), 2)
        self.assertIn(self.m1.name, subset)
        self.assertIn(self.m2.name, subset)
        self.assertNotIn(self.m3.name, subset)

    def test_subset_B(self):
        subset = self.metric_set.subset('pkgB')
        self.assertEqual(len(subset), 1)
        self.assertNotIn(self.m1.name, subset)
        self.assertNotIn(self.m2.name, subset)
        self.assertIn(self.m3.name, subset)

    def test_subset_testing_tag(self):
        subset = self.metric_set.subset(tags=['testing'])
        self.assertEqual(len(subset), 2)
        self.assertIn(self.m1.name, subset)
        self.assertNotIn(self.m2.name, subset)
        self.assertIn(self.m3.name, subset)

    def test_subset_A_testing_tag(self):
        subset = self.metric_set.subset(package='pkgA', tags=['testing'])
        self.assertEqual(len(subset), 1)
        self.assertIn(self.m1.name, subset)
        self.assertNotIn(self.m2.name, subset)
        self.assertNotIn(self.m3.name, subset)
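The snippets on this page omit their imports. A minimal preamble that makes Example #1 runnable, assuming the public lsst.verify namespace:

import unittest

from lsst.verify import Metric, MetricSet

# ... test case classes from this page ...

if __name__ == '__main__':
    unittest.main()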
Example #2
    def test_insert(self):
        """Test MetricSet.insert."""
        m1 = Metric('validate_drp.test',
                    'test',
                    '',
                    reference_url='example.com',
                    reference_doc='Doc',
                    reference_page=1)
        metric_set = MetricSet()

        metric_set.insert(m1)
        self.assertEqual(m1, metric_set['validate_drp.test'])
Example #3
    def test_setitem_delitem(self):
        """Test adding and deleting metrics."""
        m1 = Metric('validate_drp.test',
                    'test',
                    '',
                    reference_url='example.com',
                    reference_doc='Doc',
                    reference_page=1)
        metric_set = MetricSet()
        self.assertEqual(len(metric_set), 0)

        metric_set['validate_drp.test'] = m1
        self.assertEqual(len(metric_set), 1)
        self.assertEqual(metric_set['validate_drp.test'], m1)

        with self.assertRaises(KeyError):
            # inconsistent metric names
            metric_set['validate_drp.new_test'] = m1

        with self.assertRaises(TypeError):
            # Not a metric name
            n = Name('validate_drp')
            m2 = Metric(n, 'test', '')
            metric_set[n] = m2

        del metric_set['validate_drp.test']
        self.assertEqual(len(metric_set), 0)
Example #4
def ingest_data(filenames, metrics_package):
    """Load JSON files into a list of lsst.validate.base measurement Jobs.

    Parameters
    ----------
    filenames : list of str
        Filenames of JSON files to load.

    Returns
    -------
    job_list : list of lsst.validate.base.Job
        Each element is the Job representation of the JSON file.
    """
    jobs = {}
    # Read in JSON output from metrics run
    for filename in filenames:
        with open(filename) as fh:
            data = json.load(fh)
            job = Job.deserialize(**data)
        filter_name = job.meta['filter_name']
        metrics = MetricSet.load_metrics_package(metrics_package)
        job.metrics.update(metrics)
        specs = SpecificationSet.load_metrics_package(metrics_package)
        job.specs.update(specs)
        jobs[filter_name] = job

    return jobs
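A hypothetical invocation of ingest_data; the filenames are illustrative, and each file is assumed to carry a filter_name key in its job metadata, as the loop above requires:

jobs = ingest_data(['output_r.json', 'output_i.json'], 'verify_metrics')
for filter_name, job in jobs.items():
    # Each value is a Job with metrics and specifications attached.
    print(filter_name, len(job.measurements))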
Example #5
def main():
    try:
        default_metrics_package_dir = getPackageDir('verify_metrics')
    except lsst.pex.exceptions.NotFoundError:
        default_metrics_package_dir = None

    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument("package_dir",
                        default=default_metrics_package_dir,
                        type=str,
                        nargs='?',
                        help="Filepath of the metrics package to be checked.")
    args = parser.parse_args()

    print('Linting {}.'.format(args.package_dir))

    metric_repo = MetricSet.load_metrics_package(args.package_dir)
    print('Passed: metrics/')
    print('\tParsed {0:d} metrics.'.format(len(metric_repo)))

    spec_set = SpecificationSet.load_metrics_package(args.package_dir)
    print('Passed: specs/')
    print('\tParsed {0:d} specifications.'.format(len(spec_set)))

    print("\nAll tests passed.")
Example #6
    def __init__(self, repository, collection, metrics_package, spec, dataset_name):
        # Hard-coding verify_metrics as the package for now.
        # It would be easy to pass this in as an argument, if necessary.
        self.metrics = MetricSet.load_metrics_package(package_name_or_path='verify_metrics',
                                                      subset=metrics_package)
        self.butler = Butler(repository)
        self.registry = self.butler.registry
        self.spec = spec
        self.collection = collection
        self.dataset_name = dataset_name
Example #7
    def setUp(self):
        """Use YAML in data/metrics for metric definitions."""
        self.metrics_yaml_dirname = os.path.join(os.path.dirname(__file__),
                                                 'data')
        self.metric_set = MetricSet.load_metrics_package(
            self.metrics_yaml_dirname)

        self.pa1_meas = Measurement(self.metric_set['testing.PA1'],
                                    4. * u.mmag)
        self.am1_meas = Measurement(self.metric_set['testing.AM1'],
                                    2. * u.marcsec)
        self.pa2_meas = Measurement(self.metric_set['testing.PA2'],
                                    10. * u.mmag)
Example #8
    def setUp(self):
        # Mock metrics
        self.metric_photrms = Metric('test.PhotRms', 'Photometric RMS', 'mmag')
        self.metric_photmed = Metric('test.PhotMedian', 'Median magnitude',
                                     'mag')
        self.metric_set = MetricSet([self.metric_photrms, self.metric_photmed])

        # Mock specifications
        self.spec_photrms_design = ThresholdSpecification(
            'test.PhotRms.design', 20. * u.mmag, '<')
        self.spec_set = SpecificationSet([self.spec_photrms_design])

        # Mock measurements
        self.meas_photrms = Measurement(self.metric_photrms,
                                        15 * u.mmag,
                                        notes={'note': 'value'})
        self.meas_photrms.extras['n_stars'] = Datum(
            250,
            label='N stars',
            description='Number of stars included in RMS estimate')
        self.measurement_set = MeasurementSet([self.meas_photrms])

        # Metrics for Job 2
        self.metric_test_2 = Metric('test2.SourceCount', 'Source Count', '')
        self.blob_test_2 = Blob('test2_blob',
                                sn=Datum(50 * u.dimensionless_unscaled,
                                         label='S/N'))
        self.metric_set_2 = MetricSet([self.metric_test_2])

        # Specifications for Job 2
        self.spec_test_2 = ThresholdSpecification(
            'test2.SourceCount.design', 100 * u.dimensionless_unscaled, '>=')
        self.spec_set_2 = SpecificationSet([self.spec_test_2])

        # Measurements for Job 2
        self.meas_test_2_SourceCount = Measurement(
            self.metric_test_2, 200 * u.dimensionless_unscaled)
        self.meas_test_2_SourceCount.link_blob(self.blob_test_2)
        self.measurement_set_2 = MeasurementSet([self.meas_test_2_SourceCount])
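These fixtures are typically assembled into Job objects in the tests that follow; a minimal sketch, assuming from lsst.verify import Job and that the constructor accepts the sets by keyword:

    def test_job_assembly(self):
        # Hypothetical test method: the Job keyword names below
        # (measurements, metrics, specs) are assumed from the
        # lsst.verify API, not taken from this page.
        job = Job(measurements=self.measurement_set,
                  metrics=self.metric_set,
                  specs=self.spec_set)
        self.assertIn('test.PhotRms', job.measurements)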
Example #9
def main():
    """Main entrypoint for the ``lint_metrics.py`` script.
    """
    args = build_argparser().parse_args()

    print('Linting {}.'.format(args.package_dir))

    metric_repo = MetricSet.load_metrics_package(args.package_dir)
    print('Passed: metrics/')
    print('\tParsed {0:d} metrics.'.format(len(metric_repo)))

    spec_set = SpecificationSet.load_metrics_package(args.package_dir)
    print('Passed: specs/')
    print('\tParsed {0:d} specifications.'.format(len(spec_set)))

    print("\nAll tests passed.")
Example #10
    def test_update(self):
        """Test MetricSet.update."""
        m1 = Metric('validate_drp.test',
                    'test',
                    '',
                    reference_url='example.com',
                    reference_doc='Doc',
                    reference_page=1)
        new_metric_set = MetricSet([m1])

        self.metric_set.update(new_metric_set)

        self.assertIn('validate_drp.test', self.metric_set)
        self.assertIn('testing.PA1', self.metric_set)
        self.assertIn('testing.PF1', self.metric_set)
        self.assertIn('testing.PA2', self.metric_set)
        self.assertIn('testing.AM1', self.metric_set)
Example #11
    def test_iadd(self):
        """Test __iadd__ to merging metric sets."""
        m1 = Metric('validate_drp.test',
                    'test',
                    '',
                    reference_url='example.com',
                    reference_doc='Doc',
                    reference_page=1)
        new_metric_set = MetricSet([m1])

        self.metric_set += new_metric_set

        self.assertIn('validate_drp.test', self.metric_set)
        self.assertIn('testing.PA1', self.metric_set)
        self.assertIn('testing.PF1', self.metric_set)
        self.assertIn('testing.PA2', self.metric_set)
        self.assertIn('testing.AM1', self.metric_set)
Example #12
def load_json_output(filepath, metrics_package='verify_metrics'):
    """Read JSON from a file into a job object.

    Currently just does a trivial de-serialization with no checking
    to make sure that one results with a valid validate.base.job object.

    Parameters
    ----------
    filepath : `str`
        Source file name for JSON output.

    Returns
    -------
    job : A `validate.base.job` object.
    """
    with open(filepath, 'r') as infile:
        json_data = json.load(infile)

    job = Job.deserialize(**json_data)
    metrics = MetricSet.load_metrics_package(metrics_package)
    job.metrics.update(metrics)
    specs = SpecificationSet.load_metrics_package(metrics_package)
    job.specs.update(specs)
    return job
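A hypothetical call, assuming a JSON file written by a previous metrics run (the filename is illustrative):

job = load_json_output('validate_drp_output.json')
for name in job.measurements:
    print(name, job.measurements[name].quantity)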
Example #13
    def test_serialization(self):
        json_doc = self.metric_set.json
        new_metric_set = MetricSet.deserialize(json_doc)
        self.assertEqual(self.metric_set, new_metric_set)

    def setUp(self):
        self.metrics_yaml_dirname = os.path.join(os.path.dirname(__file__),
                                                 'data')
        self.metric_set = MetricSet.load_metrics_package(
            self.metrics_yaml_dirname)
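The json property exercised in test_serialization yields plain JSON-serializable objects, as the deserialize round trip above suggests, so it extends naturally to disk; a minimal sketch (the filename is illustrative):

import json

# metric_set as created in the setUp above.
json_doc = metric_set.json
with open('metrics.json', 'w') as fh:
    json.dump(json_doc, fh)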
Example #15
    def setUp(self):
        self.metric_set = MetricSet.load_metrics_package('verify_metrics')
Example #16
    def test_nonexistent_package(self):
        """Test handling of nonexistent metrics packages/directories."""
        with self.assertRaises(OSError):
            MetricSet.load_metrics_package('nonexistent_metrics')
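In application code, the same OSError can be caught to degrade gracefully; a minimal sketch under the assumption shown in this test:

try:
    metrics = MetricSet.load_metrics_package('verify_metrics')
except OSError:
    # Fall back to an empty set when the metrics package is unavailable.
    metrics = MetricSet()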
Example #17
    def setUp(self):
        self.m1 = Metric('pkgA.m1', 'In pkgA', '', tags=['testing'])
        self.m2 = Metric('pkgA.m2', 'In pkgA', '', tags=['other'])
        self.m3 = Metric('pkgB.m3', 'In pkgB', '', tags=['testing'])
        self.metric_set = MetricSet([self.m1, self.m2, self.m3])