def main(self):
    """Perform the differencing.

    Locates the two files, optionally syncs supporting rules into the CRDS
    cache, then prints one of several diff reports.

    Returns 2 when any errors or warnings were logged, otherwise the status
    of the selected report/differencing call.
    """
    self.args.files = [self.args.old_file, self.args.new_file]   # for defining self.observatory
    self.old_file = self.locate_file(self.args.old_file)
    self.new_file = self.locate_file(self.args.new_file)
    if self.args.brief:
        # --brief is shorthand for a bundle of terser-output switches.
        self.args.lowest_mapping_only = True
        self.args.remove_paths = True
        self.args.hide_boring_diffs = True
        self.args.include_header_diffs = True
    if self.args.sync_files:
        assert not (self.args.cache1 or self.args.cache2), \
            "--sync-files is not compatible with cache-to-cache differences."
        if self.args.print_all_new_files:
            serial_old = naming.newstyle_serial(self.old_file)
            serial_new = naming.newstyle_serial(self.new_file)
            # BUG FIX: "+ 1" used to be applied to serial_new *before* this
            # None check, raising TypeError for non-serial mapping names
            # instead of reaching the warning branch below.
            if None not in [serial_old, serial_new]:
                errs = sync.SyncScript("crds.sync --range {0}:{1}".format(serial_old, serial_new + 1))()
                assert not errs, "Errors occurred while syncing all rules to CRDS cache."
            else:
                log.warning("Cannot sync non-standard mapping names, results may be incomplete.")
        else:
            self.sync_files([self.old_file, self.new_file])
    elif self.args.print_all_new_files:
        log.warning("--print-all-new-files requires a complete set of rules. suggest --sync-files.")
    assert (self.args.cache1 and self.args.cache2) or (not self.args.cache1 and not self.args.cache2), \
        "Cache-to-cache comparison requires both --cache1 and --cache2; otherwise neither for single cache comparison."
    # Exactly one report style runs;  full differencing is the default.
    if self.args.print_new_files:
        status = self.print_new_files()
    elif self.args.print_all_new_files:
        status = self.print_all_new_files()
    elif self.args.print_affected_instruments:
        status = self.print_affected_instruments()
    elif self.args.print_affected_types:
        status = self.print_affected_types()
    elif self.args.print_affected_modes:
        status = self.print_affected_modes()
    else:
        status = difference(self.observatory, self.old_file, self.new_file,
                            primitive_diffs=self.args.primitive_diffs,
                            check_diffs=self.args.check_diffs,
                            check_references=self.args.check_references,
                            mapping_text_diffs=self.args.mapping_text_diffs,
                            include_header_diffs=self.args.include_header_diffs,
                            hide_boring_diffs=self.args.hide_boring_diffs,
                            recurse_added_deleted=self.args.recurse_added_deleted,
                            lowest_mapping_only=self.args.lowest_mapping_only,
                            remove_paths=self.args.remove_paths,
                            squash_tuples=self.args.squash_tuples,
                            cache1=self.args.cache1,
                            cache2=self.args.cache2)
    if log.errors() or log.warnings():
        return 2
    else:
        return status
def main(self):
    """Resolve the command line into a context plus reference files and dump
    the match tuples containing those references, or dump dataset matching
    parameters.  Returns the number of errors logged.
    """
    if self.matched_files:
        self.dump_reference_matches()
        return log.errors()
    if self.args.datasets or self.args.instrument:
        self.dump_dataset_headers()
        return log.errors()
    # Nothing to do:  show usage and log an error so the exit status is non-zero.
    self.print_help()
    log.error("Specify --files to dump reference match cases or --datasets to dump dataset matching parameters.")
    return log.errors()
def main(self):
    """Top level processing method.

    Polls the server for effects, processes them, then records progress.
    Returns the number of errors logged.
    """
    self.require_server_connection()
    # Maintenance modes short-circuit normal polling/processing.
    if self.args.list_history:
        return self.list_history()
    if self.args.reset:
        return self.reset_last_processed()
    affected = self.polled()
    processed_ids = self.process(affected)
    self.use_all_ids(affected, processed_ids)
    self.log_all_ids(affected, processed_ids)
    if affected:
        self.save_last_processed(affected)
    return log.errors()
def main(self):
    """Run one refactoring command on the old/new rmap pair, converting any
    exception into a logged error.  Returns the number of errors logged.
    """
    command = self.args.command
    old, new, refs = self.args.old_rmap, self.args.new_rmap, self.args.references
    with log.error_on_exception("Refactoring operation FAILED"):
        if command == "insert":
            rmap_insert_references(old, new, refs)
        elif command == "delete":
            rmap_delete_references(old, new, refs)
        elif command == "set_header":
            # First extra value is the header key, the rest form the value.
            set_header_value(old, new, refs[0], " ".join(refs[1:]))
        elif command == "del_header":
            del_header_value(old, new, refs[0])
        else:
            raise ValueError("Unknown refactoring command: " + repr(command))
    return log.errors()
def main(self):
    """Configure lenient-mode environment variables and resolve file paths,
    then dispatch the requested refactoring subcommand.  Returns the number
    of errors logged.
    """
    if self.args.best_effort:
        # Relax validation so best-effort runs do not fail on known-bad metadata.
        os.environ["PASS_INVALID_VALUES"] = "1"             # JWST SSB cal code data model
        os.environ["CRDS_ALLOW_BAD_USEAFTER"] = "1"         # Don't fail for bad USEAFTER values
        os.environ["CRDS_ALLOW_SCHEMA_VIOLATIONS"] = "1"    # Don't fail for data model bad value errors
        os.environ["CRDS_ALLOW_BAD_PARKEY_VALUES"] = "1"    # Don't fail for values which don't pass DM + .tpn checking
    if self.args.rmaps:
        # Clean up dead (blank) lines from file lists, resolving the rest.
        self.args.rmaps = [self.resolve_context(name) for name in self.args.rmaps if name.strip()]
    if self.args.references:
        self.args.references = [self.locate_file(ref) for ref in self.args.references]

    def _insert_reference():
        # With --old-rmap, operate on an explicit rmap pair;  otherwise
        # figure it all out relative to --source-context.
        if self.args.old_rmap:
            rmap_insert_references(self.resolve_context(self.args.old_rmap),
                                   self.resolve_context(self.args.new_rmap),
                                   self.args.references)
        else:
            self.insert_references()

    def _delete_reference():
        rmap_delete_references(self.resolve_context(self.args.old_rmap),
                               self.resolve_context(self.args.new_rmap),
                               self.args.references)

    with log.error_on_exception("Refactoring operation FAILED"):
        handlers = {
            "insert_reference": _insert_reference,
            "delete_reference": _delete_reference,
            "del_header": self.del_header_key,
            "set_header": self.set_header_key,
            "del_parameter": self.del_parameter,
            "set_parkey": self.set_parkey,
            "replace_text": self.replace_text,
            "set_substitution": self.set_substitution,
            "cat": self.cat,
            "add_useafter": self.add_useafter,
            "diff_rmaps": self.diff_rmaps,
            "certify_rmaps": self.certify_rmaps,
        }
        handler = handlers.get(self.args.command)
        if handler is None:
            raise ValueError("Unknown refactoring command: " + repr(self.args.command))
        handler()
    log.standard_status()
    return log.errors()
def main(self):
    """Process command line parameters into a context plus reference files
    and print the match tuples within that context which contain the
    references;  --datasets/--instrument dumps dataset matching parameters
    instead.  Returns the number of errors logged.
    """
    if self.matched_files:
        self.dump_reference_matches()
    elif self.args.datasets or self.args.instrument:
        self.dump_dataset_headers()
    else:
        # No work specified:  show usage and record an error.
        self.print_help()
        log.error("Specify --files to dump reference match cases "
                  "or --datasets to dump dataset matching parameters.")
    return log.errors()
def main(self):
    """Synchronize files.

    Applies mode-implied switches, syncs mappings/references according to
    the command line, then optionally verifies files, verifies a context
    change, and pushes the context.  Returns the number of errors logged.
    """
    if self.args.dry_run:
        # A dry run must not modify the cache.
        self.args.readonly_cache = True
    if self.args.repair_files:
        self.args.check_files = True
    if self.args.organize:   # do this before syncing anything under the current mode.
        self.organize_references(self.args.organize)
    self.require_server_connection()
    if self.readonly_cache and self.args.verify_context_change:
        log.error("--readonly-cache and --verify-context-change are incompatible, a readonly cache cannot change.")
    # BUG FIX: default to [] so the later verification step cannot raise
    # NameError when e.g. --fetch-sqlite-db is combined with --check-files.
    verify_file_list = []
    if self.args.files:
        self.sync_explicit_files()
        verify_file_list = self.files
    elif self.args.fetch_sqlite_db:
        self.fetch_sqlite_db()
    elif self.contexts:
        active_mappings = self.get_context_mappings()
        verify_file_list = active_mappings
        if self.args.fetch_references or self.args.purge_references:
            if self.args.dataset_files or self.args.dataset_ids:
                active_references = self.sync_datasets()
            else:
                active_references = self.get_context_references()
            # Add conjugate files and de-duplicate.
            active_references = sorted(set(active_references + self.get_conjugates(active_references)))
            if self.args.fetch_references:
                self.fetch_references(active_references)
                verify_file_list += active_references
            if self.args.purge_references:
                self.purge_references(active_references)
        if self.args.purge_mappings:
            self.purge_mappings()
    else:
        log.error("Define --all, --contexts, --last, --range, --files, or --fetch-sqlite-db to sync.")
        sys.exit(-1)
    if self.args.check_files or self.args.check_sha1sum or self.args.repair_files:
        self.verify_files(verify_file_list)
    if self.args.verify_context_change:
        # Capture the pre-update context so the change can be verified below.
        old_context = heavy_client.load_server_info(self.observatory).operational_context
    heavy_client.update_config_info(self.observatory)
    if self.args.verify_context_change:
        self.verify_context_change(old_context)
    if self.args.push_context:
        self.push_context()
    self.report_stats()
    log.standard_status()
    return log.errors()
def main(self):
    """Normalize rmap/reference paths from the command line, then execute the
    selected refactoring subcommand inside an error-trapping context.
    Returns the number of errors logged.
    """
    if self.args.rmaps:
        # Filter out dead (blank) lines from file lists, resolving the rest.
        self.args.rmaps = [self.resolve_context(name) for name in self.args.rmaps if name.strip()]
    if self.args.references:
        self.args.references = [self.locate_file(ref) for ref in self.args.references]
    cmd = self.args.command
    with log.error_on_exception("Refactoring operation FAILED"):
        if cmd == "insert_reference":
            if self.args.old_rmap:
                # Explicit rmap pair given on the command line.
                rmap_insert_references(self.resolve_context(self.args.old_rmap),
                                       self.resolve_context(self.args.new_rmap),
                                       self.args.references)
            else:
                # Figure it all out relative to --source-context.
                self.insert_references()
        elif cmd == "delete_reference":
            rmap_delete_references(self.resolve_context(self.args.old_rmap),
                                   self.resolve_context(self.args.new_rmap),
                                   self.args.references)
        elif cmd == "del_header":
            self.del_header_key()
        elif cmd == "set_header":
            self.set_header_key()
        elif cmd == "del_parameter":
            self.del_parameter()
        elif cmd == "set_parkey":
            self.set_parkey()
        elif cmd == "replace_text":
            self.replace_text()
        elif cmd == "set_substitution":
            self.set_substitution()
        elif cmd == "cat":
            self.cat()
        elif cmd == "add_useafter":
            self.add_useafter()
        elif cmd == "diff_rmaps":
            self.diff_rmaps()
        elif cmd == "certify_rmaps":
            self.certify_rmaps()
        else:
            raise ValueError("Unknown refactoring command: " + repr(cmd))
    log.standard_status()
    return log.errors()
def main(self):
    """Main control flow of submission directory and request manifest creation.

    Sets up the server session and submission object, starts the requested
    submission kind, then optionally waits for and monitors completion.
    Returns the number of errors logged.
    """
    log.divider("setting up", char="=")
    self.require_server_connection()
    self.finish_parameters()
    if self.args.logout:
        return self.logout()
    self.submission = self.create_submission()
    self.login()
    if self.args.wipe_existing_files:
        self.wipe_files()
    self.jpoll_key = self.jpoll_open_channel()
    # Each submission kind maps to the method which starts it.
    starters = {
        "batch": self.batch_submit_references,
        "certify": self.certify_files,
        "references": self.submit_references,
        "mappings": self.submit_mappings,
    }
    if self.args.submission_kind in starters:
        submit_future = starters[self.args.submission_kind]()
    if self.args.monitor_processing:
        monitor_future = self.monitor()
    if self.args.wait_for_completion:
        self.submission_complete(submit_future)
    if self.args.monitor_processing:
        self.monitor_complete(monitor_future)
    log.standard_status()
    return log.errors()
def main(self):
    """Maintain and query the local copy of the CRDS catalog sqlite database.

    Fetches/updates the database dump when requested or when no local copy
    exists, then performs whichever listing or query actions were specified
    on the command line.  Returns the number of errors logged.

    NOTE(review): the original docstring here described the reference-match
    dumping tool and appears to have been a copy-paste error;  replaced with
    a description matching the visible behavior.
    """
    # Fetch a fresh dump if explicitly requested or none exists locally yet.
    if self.args.update_db or not os.path.exists(self.sqlite_db_path):
        self.fetch_sqlite_db()
    if self.args.list_tables:
        self.list_tables()
    if self.args.list_columns:
        # --list-columns takes the table name as its first value.
        self.list_columns(self.args.list_columns[0])
    if self.args.list_database_path:
        print(self.sqlite_db_path)
    if self.args.sql_query:
        # The query may arrive as multiple shell words;  rejoin them.
        self.run_query(" ".join(self.args.sql_query))
    return log.errors()
def main(self):
    """Build a new context from --old-pmap and --new-rmap, then update the
    header names of the generated mappings.  Returns logged error count.
    """
    update_header_names(new_context(self.args.old_pmap, self.args.new_rmap))
    return log.errors()
def main(self):
    """Synchronize files.

    Applies mode-implied switches and single-directory output routing,
    optionally clears cached pickles, syncs mappings/references according
    to the command line, then verifies / pickles / pushes as requested.
    Returns the number of errors logged.
    """
    if self.args.dry_run:
        # A dry run must not modify the cache.
        self.args.readonly_cache = True
    if self.args.repair_files:
        self.args.check_files = True
    if self.args.output_dir:
        # Route every cached file category into the single output directory.
        os.environ["CRDS_MAPPATH_SINGLE"] = self.args.output_dir
        os.environ["CRDS_REFPATH_SINGLE"] = self.args.output_dir
        os.environ["CRDS_CFGPATH_SINGLE"] = self.args.output_dir
        os.environ["CRDS_PICKLEPATH_SINGLE"] = self.args.output_dir
    if self.args.clear_pickles or self.args.ignore_cache or self.args.repair_files:
        self.clear_pickles(self.contexts)
    if self.args.organize:   # do this before syncing anything under the current mode.
        self.organize_references(self.args.organize)
    self.require_server_connection()
    if self.readonly_cache and self.args.verify_context_change:
        log.error("--readonly-cache and --verify-context-change are incompatible, a readonly cache cannot change.")
    # BUG FIX: default to [] so the later verification step cannot raise
    # NameError when e.g. --fetch-sqlite-db is combined with --check-files.
    verify_file_list = []
    if self.args.files:
        self.sync_explicit_files()
        verify_file_list = self.files
    elif self.args.fetch_sqlite_db:
        self.fetch_sqlite_db()
    elif self.contexts:
        active_mappings = self.get_context_mappings()
        verify_file_list = active_mappings
        if self.args.fetch_references or self.args.purge_references:
            if self.args.dataset_files or self.args.dataset_ids:
                active_references = self.sync_datasets()
            else:
                active_references = self.get_context_references()
            # Add conjugate files and de-duplicate.
            active_references = sorted(set(active_references + self.get_conjugates(active_references)))
            if self.args.fetch_references:
                self.fetch_references(active_references)
                verify_file_list += active_references
            if self.args.purge_references:
                self.purge_references(active_references)
        if self.args.purge_mappings:
            self.purge_mappings()
    else:
        log.error("Define --all, --contexts, --last, --range, --files, or --fetch-sqlite-db to sync.")
        sys.exit(-1)
    if self.args.check_files or self.args.check_sha1sum or self.args.repair_files:
        self.verify_files(verify_file_list)
    if self.args.save_pickles:
        self.pickle_contexts(self.contexts)
    if self.args.verify_context_change:
        # Capture the pre-update context so the change can be verified below.
        old_context = heavy_client.load_server_info(self.observatory).operational_context
    heavy_client.update_config_info(self.observatory)
    if self.args.verify_context_change:
        self.verify_context_change(old_context)
    if self.args.push_context:
        self.push_context()
    self.report_stats()
    log.standard_status()
    return log.errors()
def main(self):
    """Perform the differencing.

    Locates the two files, optionally syncs supporting rules into the CRDS
    cache, then prints one of several diff reports.

    Returns 2 when any errors or warnings were logged, otherwise the status
    of the selected report/differencing call.
    """
    self.args.files = [self.args.old_file, self.args.new_file]   # for defining self.observatory
    self.old_file = self.locate_file(self.args.old_file)
    self.new_file = self.locate_file(self.args.new_file)
    if self.args.brief:
        # --brief is shorthand for a bundle of terser-output switches.
        self.args.lowest_mapping_only = True
        self.args.remove_paths = True
        self.args.hide_boring_diffs = True
        self.args.include_header_diffs = True
    if self.args.sync_files:
        assert not (self.args.cache1 or self.args.cache2), \
            "--sync-files is not compatible with cache-to-cache differences."
        if self.args.print_all_new_files:
            serial_old = naming.newstyle_serial(self.old_file)
            serial_new = naming.newstyle_serial(self.new_file)
            # BUG FIX: "+ 1" used to be applied to serial_new *before* this
            # None check, raising TypeError for non-serial mapping names
            # instead of reaching the warning branch below.
            if None not in [serial_old, serial_new]:
                errs = sync.SyncScript("crds.sync --range {0}:{1}".format(
                    serial_old, serial_new + 1))()
                assert not errs, "Errors occurred while syncing all rules to CRDS cache."
            else:
                log.warning(
                    "Cannot sync non-standard mapping names, results may be incomplete."
                )
        else:
            self.sync_files([self.old_file, self.new_file])
    elif self.args.print_all_new_files:
        log.warning(
            "--print-all-new-files requires a complete set of rules. suggest --sync-files."
        )
    assert (self.args.cache1 and self.args.cache2) or (not self.args.cache1 and not self.args.cache2), \
        "Cache-to-cache comparison requires both --cache1 and --cache2; otherwise neither for single cache comparison."
    # Exactly one report style runs;  full differencing is the default.
    if self.args.print_new_files:
        status = self.print_new_files()
    elif self.args.print_all_new_files:
        status = self.print_all_new_files()
    elif self.args.print_affected_instruments:
        status = self.print_affected_instruments()
    elif self.args.print_affected_types:
        status = self.print_affected_types()
    elif self.args.print_affected_modes:
        status = self.print_affected_modes()
    else:
        status = difference(
            self.observatory, self.old_file, self.new_file,
            primitive_diffs=self.args.primitive_diffs,
            check_diffs=self.args.check_diffs,
            check_references=self.args.check_references,
            mapping_text_diffs=self.args.mapping_text_diffs,
            include_header_diffs=self.args.include_header_diffs,
            hide_boring_diffs=self.args.hide_boring_diffs,
            recurse_added_deleted=self.args.recurse_added_deleted,
            lowest_mapping_only=self.args.lowest_mapping_only,
            remove_paths=self.args.remove_paths,
            squash_tuples=self.args.squash_tuples,
            cache1=self.args.cache1,
            cache2=self.args.cache2)
    if log.errors() or log.warnings():
        return 2
    else:
        return status
# NOTE(review): the loop + return below is the tail of a function whose
# `def` line lies before this excerpt (presumably `checksum_exists`, which
# uniqname() calls below) -- confirm against the full file.
    # Scan every HDU header for existing checksum keywords.
    for hdu in hdus:
        if "CHECKSUM" in hdu.header or "DATASUM" in hdu.header:
            add_checksum = True
            break
    else:
        # for/else: no HDU carried a checksum keyword.
        add_checksum = False
    return add_checksum

def uniqname(old_path):
    """Rename file named `oldpath` to a newstyle HST uniqname format name.

    This function is used to integrate uniqname with the HST CRDS servers as
    the approach for "Auto Rename".  This function rewrites the original file
    at a new name/path and removes the original since the new file is not
    only renamed but different.

    Verify FITS compliance and any FITS checksums, raising an exception on
    any problem.  Add FILENAME, ROOTNAME, and HISTORY keywords.  Preserve any
    FITS checksums.

    Returns new_cdbs_style_name : str
    """
    # Only ask the script to add checksums when the input already has them.
    add_checksums = "--add-checksum" if checksum_exists(old_path) else ""
    new_name = UniqnameScript("crds.uniqname --files {0} --standard --remove-original --fits-errors {1}".format(
        old_path, add_checksums))()
    return new_name

if __name__ == "__main__":
    # Run the script on sys.argv, then exit with the logged error count.
    UniqnameScript()()
    sys.exit(log.errors())