def ingest_data(filenames, metrics_package):
    """Load JSON files into a dict of lsst.verify measurement Jobs.

    Parameters
    ----------
    filenames : list of str
        Filenames of JSON files to load.
    metrics_package : str
        Name of the metrics package from which metric and specification
        definitions are loaded and merged into each Job.

    Returns
    -------
    jobs : dict of lsst.verify.Job
        The Job representation of each JSON file, keyed by the Job's
        ``filter_name`` metadata entry.  (The original docstring claimed a
        list was returned; the mapping by filter name is the actual
        behavior.)
    """
    # The metric/specification definitions depend only on metrics_package,
    # so load them once instead of re-reading the package for every file.
    metrics = MetricSet.load_metrics_package(metrics_package)
    specs = SpecificationSet.load_metrics_package(metrics_package)

    jobs = {}
    # Read in JSON output from metrics run
    for filename in filenames:
        with open(filename) as fh:
            data = json.load(fh)
        job = Job.deserialize(**data)
        filter_name = job.meta['filter_name']
        job.metrics.update(metrics)
        job.specs.update(specs)
        jobs[filter_name] = job
    return jobs
def unpersistJob(fileName):
    """Unpersist a Job object from the filename of its serialized form.

    Returns
    -------
    The `lsst.verify.Job` object contained in `fileName`.
    """
    with open(fileName) as handle:
        serialized = json.load(handle)
    # The file is fully read at this point; deserialization needs no handle.
    return Job.deserialize(**serialized)
def main():
    """Present all Job files."""
    args = build_argparser().parse_args()
    # Only label each report when more than one file is being shown.
    show_headers = len(args.json_paths) > 1
    for filename in args.json_paths:
        if show_headers:
            print("\n%s:" % filename)
        with open(filename) as f:
            data = json.load(f)
        inspect_job(Job.deserialize(**data))
def main(filenames):
    """Present all Job files.

    Parameters
    ----------
    filenames : `list` of `str`
        The Job files to open. Must be in JSON format.
    """
    # Headers are printed only when several files are shown, to keep
    # single-file output uncluttered.
    multiple = len(filenames) > 1
    for filename in filenames:
        if multiple:
            print("\n%s:" % filename)
        with open(filename) as f:
            payload = json.load(f)
        inspect_job(Job.deserialize(**payload))
def load_json_output(filepath, metrics_package='verify_metrics'):
    """Read JSON from a file into a job object.

    Currently just does a trivial de-serialization with no checking
    to make sure that one results with a valid `lsst.verify.Job` object.

    Parameters
    ----------
    filepath : `str`
        Source file name for JSON output.
    metrics_package : `str`, optional
        Name of the metrics package providing metric and specification
        definitions to merge into the Job.  Defaults to
        ``'verify_metrics'``.

    Returns
    -------
    job : `lsst.verify.Job`
        The deserialized Job, with metric and specification definitions
        refreshed from ``metrics_package``.
    """
    with open(filepath, 'r') as infile:
        json_data = json.load(infile)
    job = Job.deserialize(**json_data)
    # Merge current metric/spec definitions so measurements resolve
    # against up-to-date definitions rather than whatever was persisted.
    metrics = MetricSet.load_metrics_package(metrics_package)
    job.metrics.update(metrics)
    specs = SpecificationSet.load_metrics_package(metrics_package)
    job.specs.update(specs)
    return job
def main():
    """Entrypoint for the ``dispatch_verify.py`` command line executable.

    Loads one or more verification Job JSON files, merges them into a
    single Job, refreshes metric definitions, attaches package and CI
    environment metadata, then optionally uploads the Job, prints its
    JSON, and/or writes it to disk — all driven by ``Configuration``.
    """
    log = lsst.log.Log.getLogger('verify.bin.dispatchverify.main')

    args = parse_args()
    config = Configuration(args)
    log.debug(str(config))

    # Parse all Job JSON
    jobs = []
    for json_path in config.json_paths:
        log.info('Loading {0}'.format(json_path))
        with open(json_path) as fp:
            json_data = json.load(fp)
        job = Job.deserialize(**json_data)
        jobs.append(job)

    # Merge all Jobs into one; the first Job accumulates the others
    # via in-place addition (presumably Job.__iadd__ merges contents —
    # confirm against lsst.verify.Job).
    job = jobs.pop(0)
    if len(jobs) > 0:
        log.info('Merging verification Job JSON.')
        for other_job in jobs:
            job += other_job

    # Ensure all measurements have a metric so that units are normalized
    log.info('Refreshing metric definitions from verify_metrics')
    job.reload_metrics_package('verify_metrics')

    # Insert package metadata from lsstsw (skipped with --ignore-lsstsw)
    if not config.ignore_lsstsw:
        log.info('Inserting lsstsw package metadata from '
                 '{0}.'.format(config.lsstsw))
        job = insert_lsstsw_metadata(job, config)

    # Insert metadata from additional specified packages
    if config.extra_package_paths is not None:
        job = insert_extra_package_metadata(job, config)

    # Add environment variable metadata from the Jenkins CI environment
    if config.env_name == 'jenkins':
        log.info('Inserting Jenkins CI environment metadata.')
        jenkins_metadata = get_jenkins_env()
        job = insert_env_metadata(job, 'jenkins', jenkins_metadata)

    # Upload job unless this is a test (dry) run
    if not config.test:
        log.info('Uploading Job JSON to {0}.'.format(config.api_url))
        job.dispatch(api_user=config.api_user,
                     api_password=config.api_password,
                     api_url=config.api_url)

    if config.show_json:
        print(
            json.dumps(job.json, sort_keys=True, indent=4,
                       separators=(',', ': ')))

    # Write a json file
    if config.output_filepath is not None:
        log.info('Writing Job JSON to {0}.'.format(config.output_filepath))
        job.write(config.output_filepath)
def main():
    """Entrypoint for the ``dispatch_verify.py`` command line executable.

    Loads one or more verification Job JSON files (optionally stripping
    blobs), merges them into a single Job, refreshes metric definitions,
    attaches package and environment metadata (Jenkins or LDF), then
    optionally uploads the Job, prints its JSON, and/or writes it to
    disk — all driven by ``Configuration``.
    """
    logging.basicConfig(level=logging.INFO, stream=sys.stdout,
                        format="{name} {levelname}: {message}", style="{")
    log = _LOG.getChild('main')

    parser = build_argparser()
    args = parser.parse_args()
    config = Configuration(args)
    log.debug(str(config))

    # Parse all Job JSON
    jobs = []
    for json_path in config.json_paths:
        log.info('Loading {0}'.format(json_path))
        with open(json_path) as fp:
            json_data = json.load(fp)
        # Ignore blobs from the verification jobs
        if config.ignore_blobs:
            log.info('Ignoring blobs from Job JSON {0}'.format(json_path))
            json_data = delete_blobs(json_data)
        job = Job.deserialize(**json_data)
        jobs.append(job)

    # Merge all Jobs into one; the first Job accumulates the others
    # via in-place addition (presumably Job.__iadd__ merges contents —
    # confirm against lsst.verify.Job).
    job = jobs.pop(0)
    if len(jobs) > 0:
        log.info('Merging verification Job JSON.')
        for other_job in jobs:
            job += other_job

    # Ensure all measurements have a metric so that units are normalized
    log.info('Refreshing metric definitions from verify_metrics')
    job.reload_metrics_package('verify_metrics')

    # Insert package metadata from lsstsw (skipped with --ignore-lsstsw)
    if not config.ignore_lsstsw:
        log.info('Inserting lsstsw package metadata from '
                 '{0}.'.format(config.lsstsw))
        job = insert_lsstsw_metadata(job, config)

    # Insert metadata from additional specified packages
    if config.extra_package_paths is not None:
        job = insert_extra_package_metadata(job, config)

    # Add environment variable metadata from the Jenkins CI environment
    if config.env_name == 'jenkins':
        log.info('Inserting Jenkins CI environment metadata.')
        jenkins_metadata = get_jenkins_env()
        job = insert_env_metadata(job, 'jenkins', jenkins_metadata,
                                  config.date_created)
    elif config.env_name == 'ldf':
        log.info('Inserting LSST Data Facility environment metadata.')
        ldf_metadata = get_ldf_env()
        job = insert_env_metadata(job, 'ldf', ldf_metadata,
                                  config.date_created)

    # Upload job unless this is a test (dry) run
    if not config.test:
        log.info('Uploading Job JSON to {0}.'.format(config.api_url))
        response = job.dispatch(api_user=config.api_user,
                                api_password=config.api_password,
                                api_url=config.api_url)
        log.info(response.json()['message'])

    if config.show_json:
        print(
            json.dumps(job.json, sort_keys=True, indent=4,
                       separators=(',', ': ')))

    # Write a json file
    if config.output_filepath is not None:
        log.info('Writing Job JSON to {0}.'.format(config.output_filepath))
        job.write(config.output_filepath)
def test_job(self):
    """Create a Job from object sets and exercise its metadata,
    measurement, and (de)serialization behavior end-to-end.
    """
    job = Job(metrics=self.metric_set, specs=self.spec_set,
              measurements=self.measurement_set)

    # Test object access via properties
    self.assertIn('test.PhotRms.design', job.specs)
    self.assertIn('test.PhotRms', job.metrics)
    self.assertIn('test.PhotRms', job.measurements)

    # Test metadata access
    self.assertIn('test.PhotRms.note', job.meta)
    self.assertEqual(job.meta['test.PhotRms.note'], 'value')
    # measurement metadata is always prefixed
    self.assertNotIn('note', job.meta)
    job.meta['job-level-key'] = 'yes'
    self.assertEqual(job.meta['job-level-key'], 'yes')
    self.assertIn('job-level-key', job.meta)
    # Two entries total: the measurement note plus the job-level key.
    self.assertEqual(len(job.meta), 2)
    # update() routes prefixed keys to measurement notes and bare keys
    # to job-level metadata.
    job.meta.update({'test.PhotRms.note2': 'foo', 'dataset': 'ci_hsc'})
    # note2 should be in measurement notes
    self.assertEqual(job.measurements['test.PhotRms'].notes['note2'],
                     'foo')
    self.assertEqual(job.meta['dataset'], 'ci_hsc')

    # Delete measurement and job-level metadata
    del job.meta['test.PhotRms.note2']
    self.assertNotIn('test.PhotRms.note2', job.meta)
    self.assertNotIn('note2', job.measurements['test.PhotRms'].notes)
    del job.meta['dataset']
    self.assertNotIn('dataset', job.meta)
    # keys(), iteration, and items() must all expose the same key view.
    self.assertEqual(set(job.meta.keys()),
                     set(['job-level-key', 'test.PhotRms.note']))
    self.assertEqual(set([key for key in job.meta]),
                     set(['job-level-key', 'test.PhotRms.note']))
    keys = set()
    for key, value in job.meta.items():
        keys.add(key)
    self.assertEqual(keys, set(['job-level-key', 'test.PhotRms.note']))

    # Add a new measurement; its notes surface as prefixed job metadata.
    m = Measurement('test.PhotMedian', 28.5 * u.mag,
                    notes={'aperture_corr': True})
    job.measurements.insert(m)
    self.assertIn('test.PhotMedian', job.measurements)
    self.assertEqual(job.meta['test.PhotMedian.aperture_corr'], True)

    # Test serialization: the JSON document mirrors each collection.
    json_doc = job.json
    self.assertIn('measurements', json_doc)
    self.assertEqual(len(json_doc['measurements']),
                     len(job.measurements))
    self.assertIn('blobs', json_doc)
    self.assertIn('metrics', json_doc)
    self.assertEqual(len(json_doc['metrics']),
                     len(job.metrics))
    self.assertIn('specs', json_doc)
    self.assertEqual(len(json_doc['specs']), len(job.specs))
    self.assertIn('meta', json_doc)
    self.assertEqual(len(json_doc['meta']), len(job.meta))

    # Round-trip: deserializing the document reproduces an equal Job.
    new_job = Job.deserialize(**json_doc)
    self.assertEqual(job, new_job)
    # check job-to-measurement metadata deserialization
    self.assertEqual(new_job.measurements['test.PhotRms'].notes['note'],
                     'value')
    self.assertEqual(new_job.meta['test.PhotRms.note'], 'value')
    self.assertEqual(new_job.meta['job-level-key'], 'yes')