def main():
    """Select raw-file datasets whose filenames contain a match string.

    Loads the raw-file listing named by ``options.listings``, keeps every
    dataset group containing at least one filename that matches
    ``options.match`` (case-insensitive substring test), and writes the
    surviving groups back out as a raw-file listing JSON.

    Returns:
        0 on success, 1 if no filenames matched.
    """
    options = handle_args()
    with open(options.listings) as f:
        all_datasets, _ = driveami.load_listing(
            f, expected_datatype=driveami.Datatype.ami_la_raw)

    # Hoist the case-normalized needle out of the loops; the original
    # recomputed str.upper(options.match) for every filename tested.
    needle = options.match.upper()
    matching_datasets = {}
    for grp_name, grp_info in all_datasets.iteritems():
        for fname in grp_info['files']:
            if needle in str(fname).upper():
                matching_datasets[grp_name] = grp_info
                break  # one hit is enough to keep the whole group

    if not matching_datasets:
        print("No matches found")
        return 1

    # Default the output filename from the match string if none was given.
    if options.outfile is None:
        options.outfile = options.match + "_rawfiles.json"
    print('Datasets matching "{}" written to'.format(options.match),
          options.outfile)
    with open(options.outfile, 'w') as f:
        driveami.save_rawfile_listing(matching_datasets, f)
    return 0
def test_rawfiles_roundtrip(self):
    """A saved raw-file listing loads back identical, tagged as raw data."""
    buf = StringIO()
    driveami.save_rawfile_listing(self.testdata, buf)
    loaded, dtype = driveami.load_listing(StringIO(buf.getvalue()))
    self.assertEqual(dtype, driveami.Datatype.ami_la_raw)
    self.assertEqual(loaded, self.testdata)
def test_list_files(self):
    """File metadata starts empty, is populated by load_obs_info, and
    serializes via save_rawfile_listing without error."""
    # Three raw files discovered, each with (initially) empty metadata.
    self.assertEqual(len(self.reduce.files), 3)
    self.assertEqual(self.reduce.files.values()[0], {})

    # Loading observation info fills in the per-file metadata.
    self.reduce.load_obs_info()
    self.assertNotEqual(self.reduce.files.values()[0], {})

    # The populated metadata must be serializable to a listing.
    out = StringIO.StringIO()
    serializable = {fname: driveami.make_serializable(info)
                    for fname, info in self.reduce.files.iteritems()}
    driveami.save_rawfile_listing(serializable, out)
def main():
    """Group AMI raw observations and write three JSON listing files.

    Produces, under the ``options.outfile`` prefix: observations grouped
    by target ID, target IDs grouped by pointing (0.5 degree tolerance),
    and per-file metadata.

    Returns:
        0 on success.
    """
    options = handle_args()
    by_id_path = options.outfile + '_by_id.json'
    by_pointing_path = options.outfile + '_by_pointing.json'
    metadata_path = options.outfile + '_metadata.json'

    reducer = driveami.Reduce(options.amidir, options.amiversion,
                              options.array)
    logger.info("Loading observation metadata.")
    reducer.load_obs_info()

    logger.info("Grouping observations by target ID")
    id_groups = reducer.group_obs_by_target_id()
    with open(by_id_path, 'w') as f:
        driveami.save_rawfile_listing(id_groups, f)

    logger.info("Grouping targets by pointing")
    pointing_groups = reducer.group_target_ids_by_pointing(
        id_groups, pointing_tolerance_in_degrees=0.5)
    with open(by_pointing_path, 'w') as f:
        driveami.save_rawfile_listing(pointing_groups, f)

    with open(metadata_path, 'w') as f:
        serializable = {fname: driveami.make_serializable(info)
                        for fname, info in reducer.files.iteritems()}
        driveami.save_rawfile_listing(serializable, f)
    return 0
def main():
    """Group AMI raw observations and write three JSON listing files.

    Writes, under the ``options.outfile`` prefix: per-file metadata
    (optionally stripped of the bulky raw listing text), observations
    grouped by target ID, and target IDs grouped by pointing (0.5 degree
    tolerance).

    Returns:
        0 on success.
    """
    options = handle_args()
    by_id_path = options.outfile + '_by_id.json'
    by_pointing_path = options.outfile + '_by_pointing.json'
    metadata_path = options.outfile + '_metadata.json'

    reducer = driveami.Reduce(options.amidir, options.amiversion,
                              options.array)
    logger.info("Loading observation metadata.")
    reducer.load_obs_info()

    # File metadata first.
    with open(metadata_path, 'w') as f:
        serializable = {fname: driveami.make_serializable(info)
                        for fname, info in reducer.files.iteritems()}
        if not options.rawtext:
            # Drop the verbose raw observation text unless requested.
            for _, fdict in serializable.iteritems():
                fdict.pop('raw_obs_listing_text')
        driveami.save_rawfile_listing(serializable, f)
    logger.info("Wrote file metadata listings to {}".format(metadata_path))

    # Listings grouped by target ID.
    logger.info("Grouping observations by target ID")
    id_groups = reducer.group_obs_by_target_id()
    with open(by_id_path, 'w') as f:
        driveami.save_rawfile_listing(id_groups, f)
    logger.info("Wrote id-grouped file-listings to {}".format(by_id_path))

    # Listings grouped by pointing.
    logger.info("Grouping targets by pointing")
    pointing_groups = reducer.group_target_ids_by_pointing(
        id_groups, pointing_tolerance_in_degrees=0.5)
    with open(by_pointing_path, 'w') as f:
        driveami.save_rawfile_listing(pointing_groups, f)
    logger.info(
        "Wrote pointing-grouped file-listings to {}".format(
            by_pointing_path))
    return 0
def test_expected_cal(self):
    """Loading a raw listing raises ValueError when a calibrated
    datatype is demanded."""
    buf = StringIO()
    driveami.save_rawfile_listing(self.testdata, buf)
    with self.assertRaises(ValueError):
        driveami.load_listing(
            StringIO(buf.getvalue()),
            expected_datatype=driveami.Datatype.ami_la_calibrated)