Esempio n. 1
0
def check_duplicates(file_):
    """Certify `file_` for duplicate entries (or other certify errors) prior
    to rewriting its checksum.  Returns True when certification added any
    new errors to the log.
    """
    errors_before = log.errors()
    parsed = mapping_parser.parse_mapping(file_)
    mapping_parser.check_duplicates(parsed)
    errors_after = log.errors()
    return errors_after > errors_before
Esempio n. 2
0
 def certify_local_files(self):
     """Run CRDS certify on the submitted files,  raising CrdsError when
     errors exist either before or after the certify run.
     """
     if log.errors():
         raise CrdsError("Errors encountered before CRDS certify run.")
     certify.certify_files(
         self.files,
         context=self.pmap_name,
         observatory=self.observatory,
         dump_provenance=True,
         compare_old_reference=True,
         run_fitsverify=True,
         check_rmap=False,
         check_sha1sums=True)
     if log.errors():
         raise CrdsError("Certify errors found.  Aborting submission.")
Esempio n. 3
0
    def main(self):
        """Synchronize files.

        Top level control flow of the sync tool.  Returns the CRDS error
        count for use as the program exit status.
        """

        # clear any multiprocessing locks associated with the CRDS cache.
        if self.args.clear_locks:
            crds_cache_locking.clear_cache_locks()
            return log.errors()

        self.handle_misc_switches()   # simple side effects only

        # if explicitly requested,  or the cache is suspect or being ignored,  clear
        # cached context pickles.
        if self.args.clear_pickles or self.args.ignore_cache or self.args.repair_files:
            self.clear_pickles()

        # utility to change cache structure, e.g. add instrument subdirs.
        # do this before syncing anything under the current mode.
        if self.args.organize:
            self.organize_references(self.args.organize)

        # fetching and verifying files both require a server connection.
        self.require_server_connection()

        # primary sync'ing occurs here,  both mappings and references as well as odd-ball
        # server sqlite3 database download.
        verify_file_list = self.file_transfers()

        # verification is relative to sync'ed files,  and can include file replacement if
        # defects are found.
        if (self.args.check_files or self.args.check_sha1sum or self.args.repair_files or
            self.args.purge_blacklisted or self.args.purge_rejected):
            self.verify_files(verify_file_list)

        # context pickles should only be (re)generated after mappings are fully sync'ed and verified
        if self.args.save_pickles:
            self.pickle_contexts(self.contexts)

        # update CRDS cache config area,  including stored version of operational context.
        # implement pipeline support functions of context update verify and echo
        # If --output-dir was specified,  do not update cache config.
        if self.args.output_dir:
            log.verbose_warning("Used --output-dir,  skipping cache server_config update including default context and bad files.")
            # NOTE(review): the subdir mode file appears to be dropped so a later
            # sync re-determines the cache structure -- confirm.
            if config.writable_cache_or_verbose("skipping removal of ref_cache_subdir_mode file."):
                os.remove(config.get_crds_ref_subdir_file(self.observatory))
        else:
            self.update_context()

        self.report_stats()
        log.standard_status()
        return log.errors()
Esempio n. 4
0
    def main(self):
        """Main control flow of submission directory and request manifest creation.

        Returns the CRDS error count for use as the program exit status.
        """

        log.divider("setting up", char="=")

        self.require_server_connection()

        self.finish_parameters()

        # Optionally certify local files before submitting anything.
        if self.args.certify_files:
            self.certify_local_files()

        # --logout is a standalone action;  nothing else is done.
        if self.args.logout:
            return self.logout()

        self.login()

        if self.args.wipe_existing_files:
            self.wipe_files()

        # Open a server channel used to poll submission progress.
        self.jpoll_key = self.jpoll_open_channel()

        # NOTE(review): submit_future is unbound for any other submission_kind;
        # presumably argparse restricts the choices -- confirm.
        if self.args.submission_kind == "batch":
            submit_future = self.batch_submit_references()
        elif self.args.submission_kind == "certify":
            submit_future = self.certify_files()
        elif self.args.submission_kind == "references":
            submit_future = self.submit_references()
        elif self.args.submission_kind == "mappings":
            submit_future = self.submit_mappings()

        if self.args.monitor_processing:
            monitor_future = self.monitor()

        if self.args.wait_for_completion:
            self.submission_complete(submit_future)

        if self.args.monitor_processing:
            monitor = self.monitor_complete(monitor_future)
            if monitor.exit_status == 0:
                self._ready_url = monitor.result

        log.standard_status()

        # Cache the counts for callers interested in more than the return value.
        self._error_count = log.errors()
        self._warning_count = log.warnings()

        return log.errors()
Esempio n. 5
0
    def main(self):
        """Download CRDS files and rearrange them into the classic pysynphot
        directory organization under the specified directory.  Returns the
        CRDS error count.
        """
        self.deferred_init()

        # Start from a clean CRDS cache unless told to keep it.
        if not self.args.keep_crds:
            self.rmdir("crds")

        self.crds_download()

        links = self.pysyn_cdbs_from_syn_tables()

        # Drop old comp and mtab link versions unless told to keep them.
        if not self.args.keep_synphot:
            self.rmdir("comp")
            self.rmdir("mtab")

        self.cross_link_cdbs_paths(links)

        # Remove the CRDS cache again so only the synphot organization
        # remains in the final product.
        if not self.args.keep_crds:
            self.rmdir("crds")

        log.standard_status()
        return log.errors()
Esempio n. 6
0
    def main(self):
        """Perform the high level sequence of tasks needed to download and
        organize a version of pysynphot files under the specified directory.

        Returns the CRDS error count for use as the program exit status.
        """
        self.deferred_init()

        # Blow away CRDS cache prior to syncing
        if not self.args.keep_crds:
            self.rmdir("crds")

        self.crds_download()

        # Map synphot CDBS-style paths from the synphot lookup tables.
        syn_name_map = self.pysyn_cdbs_from_syn_tables()

        # Blow away comp and mtab to remove old versions of links
        if not self.args.keep_synphot:
            self.rmdir("comp")
            self.rmdir("mtab")

        self.cross_link_cdbs_paths(syn_name_map)

        # Blow away CRDS cache leaving only synphot organization.
        # in the final product.
        if not self.args.keep_crds:
            self.rmdir("crds")

        log.standard_status()
        return log.errors()
Esempio n. 7
0
    def main(self):
        """Synchronize files.

        Top level control flow of the sync tool.  Returns the CRDS error
        count for use as the program exit status.
        """

        # clear any multiprocessing locks associated with the CRDS cache.
        if self.args.clear_locks:
            crds_cache_locking.clear_cache_locks()
            return log.errors()

        self.handle_misc_switches()   # simple side effects only

        # if explicitly requested,  or the cache is suspect or being ignored,  clear
        # cached context pickles.
        if self.args.clear_pickles or self.args.ignore_cache or self.args.repair_files:
            self.clear_pickles()

        # utility to change cache structure, e.g. add instrument subdirs.
        # do this before syncing anything under the current mode.
        if self.args.organize:
            self.organize_references(self.args.organize)

        # fetching and verifying files both require a server connection.
        self.require_server_connection()

        # primary sync'ing occurs here,  both mappings and references as well as odd-ball
        # server sqlite3 database download.
        verify_file_list = self.file_transfers()

        # verification is relative to sync'ed files,  and can include file replacement if
        # defects are found.
        if self.args.check_files or self.args.check_sha1sum or self.args.repair_files:
            self.verify_files(verify_file_list)

        # context pickles should only be (re)generated after mappings are fully sync'ed and verified
        if self.args.save_pickles:
            self.pickle_contexts(self.contexts)

        # update CRDS cache config area,  including stored version of operational context.
        # implement pipeline support functions of context update verify and echo
        # If explicit files were specified,  do not update cache config.
        # NOTE(review): the guard requires BOTH --files and --output-dir -- confirm intent.
        if self.args.files and self.args.output_dir:
            log.verbose_warning("Used explicit --files list and --output-dir,  skipping cache server_config update including default context and bad files.")
        else:
            self.update_context()

        self.report_stats()
        log.standard_status()
        return log.errors()
Esempio n. 8
0
    def main(self):
        """Perform the differencing.

        Locates the two files,  optionally syncs the rules/references needed
        for the comparison,  then runs the requested difference or print mode.

        Returns 2 if any errors or warnings were logged,  otherwise the
        status of the selected operation.
        """
        self.args.files = [ self.args.old_file, self.args.new_file ]   # for defining self.observatory
        self.old_file = self.locate_file(self.args.old_file)
        self.new_file = self.locate_file(self.args.new_file)

        # --brief is shorthand for a bundle of output-trimming switches.
        if self.args.brief:
            self.args.lowest_mapping_only = True
            self.args.remove_paths = True
            self.args.hide_boring_diffs = True
            self.args.include_header_diffs = True

        if self.args.sync_files:
            assert not (self.args.cache1 or self.args.cache2), \
                "--sync-files is not compatible with cache-to-cache differences."
            if self.args.print_all_new_files:
                serial_old = naming.newstyle_serial(self.old_file)
                serial_new = naming.newstyle_serial(self.new_file)
                # BUG FIX: the +1 is applied only after the None check;  previously
                # `newstyle_serial(...) + 1` raised TypeError for non-standard
                # names before the warning below could ever be issued.
                if None not in [serial_old, serial_new]:
                    errs = sync.SyncScript("crds.sync --range {0}:{1}".format(serial_old, serial_new + 1))()
                    assert not errs, "Errors occurred while syncing all rules to CRDS cache."
                else:
                    log.warning("Cannot sync non-standard mapping names,  results may be incomplete.")
            else:
                self.sync_files([self.old_file, self.new_file])
        elif self.args.print_all_new_files:
            log.warning("--print-all-new-files requires a complete set of rules.  suggest --sync-files.")

        # Cache-to-cache comparison is all-or-nothing:  both caches or neither.
        assert (self.args.cache1 and self.args.cache2) or (not self.args.cache1 and not self.args.cache2), \
            "Cache-to-cache comparison requires both --cache1 and --cache2;  otherwise neither for single cache comparison."

        if self.args.print_new_files:
            status = self.print_new_files()
        elif self.args.print_all_new_files:
            status = self.print_all_new_files()
        elif self.args.print_affected_instruments:
            status = self.print_affected_instruments()
        elif self.args.print_affected_types:
            status = self.print_affected_types()
        elif self.args.print_affected_modes:
            status = self.print_affected_modes()
        else:
            status = difference(self.observatory, self.old_file, self.new_file,
                                primitive_diffs=self.args.primitive_diffs,
                                check_diffs=self.args.check_diffs,
                                check_references=self.args.check_references,
                                mapping_text_diffs=self.args.mapping_text_diffs,
                                include_header_diffs=self.args.include_header_diffs,
                                hide_boring_diffs=self.args.hide_boring_diffs,
                                recurse_added_deleted=self.args.recurse_added_deleted,
                                lowest_mapping_only=self.args.lowest_mapping_only,
                                remove_paths=self.args.remove_paths,
                                squash_tuples=self.args.squash_tuples,
                                cache1=self.args.cache1,
                                cache2=self.args.cache2)

        # Any logged problems trump the nominal status.
        if log.errors() or log.warnings():
            return 2
        else:
            return status
Esempio n. 9
0
    def main(self):
        """Perform the differencing.

        Locates the two files,  optionally syncs the rules/references needed
        for the comparison,  then runs the requested difference or print mode.

        Returns 2 if any errors or warnings were logged,  otherwise the
        status of the selected operation.
        """
        self.args.files = [ self.args.old_file, self.args.new_file ]   # for defining self.observatory
        self.old_file = self.locate_file(self.args.old_file)
        self.new_file = self.locate_file(self.args.new_file)

        # --brief is shorthand for a bundle of output-trimming switches.
        if self.args.brief:
            self.args.lowest_mapping_only = True
            self.args.remove_paths = True
            self.args.hide_boring_diffs = True
            self.args.include_header_diffs = True

        if self.args.sync_files:
            assert not (self.args.cache1 or self.args.cache2), \
                "--sync-files is not compatible with cache-to-cache differences."
            if self.args.print_all_new_files:
                serial_old = naming.newstyle_serial(self.old_file)
                serial_new = naming.newstyle_serial(self.new_file)
                # BUG FIX: the +1 is applied only after the None check;  previously
                # `newstyle_serial(...) + 1` raised TypeError for non-standard
                # names before the warning below could ever be issued.
                if None not in [serial_old, serial_new]:
                    errs = sync.SyncScript("crds.sync --range {0}:{1}".format(serial_old, serial_new + 1))()
                    assert not errs, "Errors occurred while syncing all rules to CRDS cache."
                else:
                    log.warning("Cannot sync non-standard mapping names,  results may be incomplete.")
            else:
                self.sync_files([self.old_file, self.new_file])
        elif self.args.print_all_new_files:
            log.warning("--print-all-new-files requires a complete set of rules.  suggest --sync-files.")

        # Cache-to-cache comparison is all-or-nothing:  both caches or neither.
        assert (self.args.cache1 and self.args.cache2) or (not self.args.cache1 and not self.args.cache2), \
            "Cache-to-cache comparison requires both --cache1 and --cache2;  otherwise neither for single cache comparison."

        if self.args.print_new_files:
            status = self.print_new_files()
        elif self.args.print_all_new_files:
            status = self.print_all_new_files()
        elif self.args.print_affected_instruments:
            status = self.print_affected_instruments()
        elif self.args.print_affected_types:
            status = self.print_affected_types()
        elif self.args.print_affected_modes:
            status = self.print_affected_modes()
        else:
            status = difference(self.observatory, self.old_file, self.new_file,
                                primitive_diffs=self.args.primitive_diffs,
                                check_diffs=self.args.check_diffs,
                                check_references=self.args.check_references,
                                mapping_text_diffs=self.args.mapping_text_diffs,
                                include_header_diffs=self.args.include_header_diffs,
                                hide_boring_diffs=self.args.hide_boring_diffs,
                                recurse_added_deleted=self.args.recurse_added_deleted,
                                lowest_mapping_only=self.args.lowest_mapping_only,
                                remove_paths=self.args.remove_paths,
                                squash_tuples=self.args.squash_tuples,
                                cache1=self.args.cache1,
                                cache2=self.args.cache2)

        # Any logged problems trump the nominal status.
        if log.errors() or log.warnings():
            return 2
        else:
            return status
Esempio n. 10
0
 def main(self):
     """Add, remove, or verify the checksum of each file,  trapping and
     logging per-file failures.  Returns the CRDS error count.
     """
     for filename in self.files:
         with log.error_on_exception("Checksum operation FAILED"):
             if self.args.remove:
                 remove_checksum(filename)
             elif self.args.verify:
                 verify_checksum(filename)
             else:
                 add_checksum(filename)
     return log.errors()
Esempio n. 11
0
 def main(self):
     """Print the match tuples within the context which contain the
     reference files given on the command line.  With no files,  print
     help and exit.
     """
     if self.args.files:
         self.print_mappings_using_files()
         return log.errors()
     self.print_help()
     sys.exit(-1)
Esempio n. 12
0
 def main(self):
     """Dump reference match tuples or dataset matching parameters as
     selected by the command line;  otherwise print help and log an error.
     Returns the CRDS error count.
     """
     if self.matched_files:
         self.dump_reference_matches()
     else:
         if self.args.datasets or self.args.instrument:
             self.dump_dataset_headers()
         else:
             self.print_help()
             log.error("Specify --files to dump reference match cases or --datasets to dump dataset matching parameters.")
     return log.errors()
Esempio n. 13
0
 def main(self):
     """Top level processing method.

     Lists history or resets the last-processed marker when requested;
     otherwise polls the server for effects,  processes them,  and saves
     progress.  Returns the CRDS error count.
     """
     self.require_server_connection()
     # Mutually exclusive maintenance modes return early.
     if self.args.list_history:
         return self.list_history()
     if self.args.reset:
         return self.reset_last_processed()
     effects = self.polled()
     ids = self.process(effects)
     self.use_all_ids(effects, ids)
     self.log_all_ids(effects, ids)
     # Only advance the last-processed marker when something was polled.
     if effects:
         self.save_last_processed(effects)
     return log.errors()
Esempio n. 14
0
 def main(self):
     """Top level processing method.

     List history or reset the marker when requested;  otherwise poll the
     server,  process the resulting effects,  and record progress.
     Returns the CRDS error count.
     """
     self.require_server_connection()
     if self.args.list_history:
         return self.list_history()
     if self.args.reset:
         return self.reset_last_processed()
     effects = self.polled()
     ids = self.process(effects)
     self.use_all_ids(effects, ids)
     self.log_all_ids(effects, ids)
     # Record progress only when new effects were actually polled.
     if effects:
         self.save_last_processed(effects)
     return log.errors()
Esempio n. 15
0
    def main(self):
        """Perform one rmap refactoring command selected by self.args.command,
        trapping and logging any exception.  Returns the CRDS error count.
        """
        if self.args.best_effort:
            config.PASS_INVALID_VALUES.set(True)           # JWST SSB cal code data model
            config.ALLOW_BAD_USEAFTER.set(True)            # Don't fail for bad USEAFTER values
            config.ALLOW_SCHEMA_VIOLATIONS.set(True)       # Don't fail for data model bad value errors
            config.ALLOW_BAD_PARKEY_VALUES.set(True)       # Don't fail for values which don't pass DM + .tpn checking

        # Drop blank lines from file lists and resolve names.
        if self.args.rmaps:
            self.args.rmaps = [self.resolve_context(mapping)
                               for mapping in self.args.rmaps if mapping.strip()]

        if self.args.references:
            self.args.references = [self.locate_file(reference)
                                    for reference in self.args.references]

        # Commands which take no extra setup dispatch through this table.
        simple_commands = {
            "del_header": self.del_header_key,
            "set_header": self.set_header_key,
            "del_parameter": self.del_parameter,
            "set_parkey": self.set_parkey,
            "replace_text": self.replace_text,
            "set_substitution": self.set_substitution,
            "cat": self.cat,
            "add_useafter": self.add_useafter,
            "diff_rmaps": self.diff_rmaps,
            "certify_rmaps": self.certify_rmaps,
        }

        with log.error_on_exception("Refactoring operation FAILED"):
            command = self.args.command
            if command == "insert_reference":
                if self.args.old_rmap:
                    old_rmap = self.resolve_context(self.args.old_rmap)
                    new_rmap = self.resolve_context(self.args.new_rmap)
                    rmap_insert_references(old_rmap, new_rmap, self.args.references)
                else:
                    self.insert_references()  # figure it all out relative to --source-context
            elif command == "delete_reference":
                old_rmap = self.resolve_context(self.args.old_rmap)
                new_rmap = self.resolve_context(self.args.new_rmap)
                rmap_delete_references(old_rmap, new_rmap, self.args.references)
            elif command in simple_commands:
                simple_commands[command]()
            else:
                raise ValueError("Unknown refactoring command: " + repr(self.args.command))

        log.standard_status()
        return log.errors()
Esempio n. 16
0
    def main(self):
        """List files.

        A few modes return immediately;  the remaining listing switches are
        combinable and handled in turn.  Returns the CRDS error count.
        """
        # Exclusive early-return modes.
        if self.args.cat is not None:  # including []
            return self.cat_files()

        if self.args.tpns:
            return self.list_tpns()

        if self.args.file_properties is not None:  # including []
            return self.list_file_properties()

        if self.args.operational_context:
            print(self.default_context)
            # NOTE(review): returns None here rather than log.errors() --
            # confirm whether exit status matters for these two modes.
            return
        if self.args.remote_context:
            print(self.remote_context)
            return

        # Combinable listing modes follow.
        if self.args.resolve_contexts:
            self.list_resolved_contexts()

        if self.args.list_references:
            self.list_references()
        if self.args.list_mappings:
            self.list_mappings()
        if self.args.cached_references:
            self.list_cached_references()
        if self.args.cached_mappings:
            self.list_cached_mappings()
        if self.args.cached_pickles:
            self.list_cached_pickles()

        if self.args.dataset_ids:
            self.list_dataset_ids()
        if self.args.dataset_headers:
            self.list_dataset_headers()

        if self.args.config:
            self.list_config()
        if self.args.status:
            self.list_status()

        if self.args.required_parkeys:
            self.list_required_parkeys()

        return log.errors()
Esempio n. 17
0
    def update_context(self):
        """Update the CRDS operational context in the cache.  Handles the
        pipeline-targeted features of (a) verifying a context switch as
        actually recorded in the local CRDS cache and (b) echoing/pushing the
        pipeline context back up to the CRDS server for tracking using an
        id/authorization key.

        Skipped with a warning when sync errors occurred,  unless
        --force_config_update is set.
        """
        # Guard clause:  equivalent (De Morgan) to the original
        # `if not log.errors() or self.args.force_config_update`.
        if log.errors() and not self.args.force_config_update:
            log.warning("Errors occurred during sync,  skipping CRDS cache config and context update.")
            return
        if self.args.verify_context_change:
            old_context = heavy_client.load_server_info(self.observatory).operational_context
        heavy_client.update_config_info(self.observatory)
        if self.args.verify_context_change:
            self.verify_context_change(old_context)
        if self.args.push_context:
            self.push_context()
Esempio n. 18
0
    def update_context(self):
        """Update the CRDS operational context in the cache.  Handle pipeline-specific
        targeted features of (a) verifying a context switch as actually recorded in
        the local CRDS cache and (b) echoing/pushing the pipeline context back up to the
        CRDS server for tracking using an id/authorization key.

        If errors occurred during the sync and --force_config_update is not set,
        the update is skipped and a warning is issued instead.
        """
        if not log.errors() or self.args.force_config_update:
            if self.args.verify_context_change:
                # Snapshot the prior context so the change can be verified below.
                old_context = heavy_client.load_server_info(self.observatory).operational_context
            heavy_client.update_config_info(self.observatory)
            if self.args.verify_context_change:
                self.verify_context_change(old_context)
            if self.args.push_context:
                self.push_context()
        else:
            log.warning("Errors occurred during sync,  skipping CRDS cache config and context update.")
Esempio n. 19
0
 def main(self):
     """Perform one simple rmap refactoring operation (insert/delete
     references,  or set/delete a header field),  trapping and logging any
     exception.  Returns the CRDS error count.
     """
     with log.error_on_exception("Refactoring operation FAILED"):
         command = self.args.command
         if command == "insert":
             rmap_insert_references(self.old_rmap, self.new_rmap, self.ref_paths)
         elif command == "delete":
             rmap_delete_references(self.old_rmap, self.new_rmap, self.ref_paths)
         elif command == "set_header":
             # First word is the field name,  remainder is the value.
             field = self.args.references[0]
             setting = " ".join(self.args.references[1:])
             set_header_value(self.old_rmap, self.new_rmap, field, setting)
         elif command == "del_header":
             del_header_value(self.old_rmap, self.new_rmap, self.args.references[0])
         else:
             raise ValueError("Unknown refactoring command: " +
                              repr(self.args.command))
     log.standard_status()
     return log.errors()
Esempio n. 20
0
    def main(self):
        """Main control flow of submission directory and request manifest creation.

        Returns the CRDS error count for use as the program exit status.
        """

        log.divider("setting up", char="=")

        self.require_server_connection()

        self.finish_parameters()

        # --logout is a standalone action;  nothing else is done.
        if self.args.logout:
            return self.logout()

        self.submission = self.create_submission()

        self.login()

        if self.args.wipe_existing_files:
            self.wipe_files()

        # Open a server channel used to poll submission progress.
        self.jpoll_key = self.jpoll_open_channel()

        # NOTE(review): submit_future is unbound for any other submission_kind;
        # presumably argparse restricts the choices -- confirm.
        if self.args.submission_kind == "batch":
            submit_future = self.batch_submit_references()
        elif self.args.submission_kind == "certify":
            submit_future = self.certify_files()
        elif self.args.submission_kind == "references":
            submit_future = self.submit_references()
        elif self.args.submission_kind == "mappings":
            submit_future = self.submit_mappings()

        if self.args.monitor_processing:
            monitor_future = self.monitor()

        if self.args.wait_for_completion:
            self.submission_complete(submit_future)

        if self.args.monitor_processing:
            self.monitor_complete(monitor_future)

        log.standard_status()
        return log.errors()
Esempio n. 21
0
    def main(self):
        """Fetch or update the local CRDS catalog sqlite database as needed,
        then perform the listing and query operations selected on the
        command line.  Returns the CRDS error count.
        """
        # Download a fresh database if requested or none exists locally yet.
        if self.args.update_db or not os.path.exists(self.sqlite_db_path):
            self.fetch_sqlite_db()

        if self.args.list_tables:
            self.list_tables()

        if self.args.list_columns:
            self.list_columns(self.args.list_columns[0])

        if self.args.list_database_path:
            print(self.sqlite_db_path)

        if self.args.sql_query:
            # Query words from the command line re-join into one SQL string.
            self.run_query(" ".join(self.args.sql_query))

        return log.errors()
Esempio n. 22
0
    def main(self):
        """Perform one rmap refactoring command selected by self.args.command,
        trapping and logging any exception.  Returns the CRDS error count.
        """

        if self.args.best_effort:
            config.PASS_INVALID_VALUES.set(
                True)  # JWST SSB cal code data model
            config.ALLOW_BAD_USEAFTER.set(
                True)  # Don't fail for bad USEAFTER values
            config.ALLOW_SCHEMA_VIOLATIONS.set(
                True)  # Don't fail for data model bad value errors
            config.ALLOW_BAD_PARKEY_VALUES.set(
                True
            )  # Don't fail for values which don't pass DM + .tpn checking

        if self.args.rmaps:  # clean up dead lines from file lists
            self.args.rmaps = [
                self.resolve_context(mapping) for mapping in self.args.rmaps
                if mapping.strip()
            ]

        if self.args.references:
            self.args.references = [
                self.locate_file(reference)
                for reference in self.args.references
            ]

        # Dispatch the requested refactoring command,  logging (not raising)
        # any failure as a CRDS error.
        with log.error_on_exception("Refactoring operation FAILED"):
            if self.args.command == "insert_reference":
                if self.args.old_rmap:
                    old_rmap, new_rmap = self.resolve_context(
                        self.args.old_rmap), self.resolve_context(
                            self.args.new_rmap)
                    rmap_insert_references(old_rmap, new_rmap,
                                           self.args.references)
                else:
                    self.insert_references(
                    )  # figure it all out relative to --source-context
            elif self.args.command == "delete_reference":
                old_rmap, new_rmap = self.resolve_context(
                    self.args.old_rmap), self.resolve_context(
                        self.args.new_rmap)
                rmap_delete_references(old_rmap, new_rmap,
                                       self.args.references)
            elif self.args.command == "del_header":
                self.del_header_key()
            elif self.args.command == "set_header":
                self.set_header_key()
            elif self.args.command == "del_parameter":
                self.del_parameter()
            elif self.args.command == "set_parkey":
                self.set_parkey()
            elif self.args.command == "replace_text":
                self.replace_text()
            elif self.args.command == "set_substitution":
                self.set_substitution()
            elif self.args.command == "cat":
                self.cat()
            elif self.args.command == "add_useafter":
                self.add_useafter()
            elif self.args.command == "diff_rmaps":
                self.diff_rmaps()
            elif self.args.command == "certify_rmaps":
                self.certify_rmaps()
            else:
                raise ValueError("Unknown refactoring command: " +
                                 repr(self.args.command))

        log.standard_status()
        return log.errors()
Esempio n. 23
0
 def main(self):
     """Generate a new context from the old pmap and new rmap,  then update
     the header names accordingly.  Returns the CRDS error count.
     """
     renames = new_context(self.args.old_pmap, self.args.new_rmap)
     update_header_names(renames)
     return log.errors()
Esempio n. 24
0
        else:
            add_checksum = False
    return add_checksum


def uniqname(old_path):
    """Rename file named `old_path` to a newstyle HST uniqname format name.  This function
    is used to integrate uniqname with the HST CRDS servers as the approach for "Auto Rename".
    This function rewrites the original file at a new name/path and removes the original since
    the new file is not only renamed but different.

    Verify FITS compliance and any FITS checksums,  raising an exception on any problem.

    Add FILENAME, ROOTNAME, and HISTORY keywords.
    Preserve any FITS checksums.

    Returns  new_cdbs_style_name : str
    """
    # Only re-add checksums when the original file already had them.
    add_checksums = "--add-checksum" if has_checksum(old_path) else ""
    new_name = UniqnameScript(
        "crds.misc.uniqname --files {0} --standard --remove-original --fits-errors {1}"
        .format(old_path, add_checksums))()
    return new_name


# ==============================================================================================

if __name__ == "__main__":
    # Run the uniqname command line tool and exit with the CRDS error count.
    UniqnameScript()()
    sys.exit(log.errors())
Esempio n. 25
0
                add_checksum = True
                break
        else:
            add_checksum = False
    return add_checksum

def uniqname(old_path):
    """Rename file named `old_path` to a newstyle HST uniqname format name.  This function
    is used to integrate uniqname with the HST CRDS servers as the approach for "Auto Rename".
    This function rewrites the original file at a new name/path and removes the original since
    the new file is not only renamed but different.

    Verify FITS compliance and any FITS checksums,  raising an exception on any problem.

    Add FILENAME, ROOTNAME, and HISTORY keywords.
    Preserve any FITS checksums.

    Returns  new_cdbs_style_name : str
    """
    # Only re-add checksums when the original file already had them.
    add_checksums = "--add-checksum" if has_checksum(old_path) else ""
    new_name = UniqnameScript("crds.misc.uniqname --files {0} --standard --remove-original --fits-errors {1}".format(
        old_path, add_checksums))()
    return new_name

# ==============================================================================================

if __name__ == "__main__":
    # Run the uniqname command line tool and exit with the CRDS error count.
    UniqnameScript()()
    sys.exit(log.errors())

Esempio n. 26
0
 def main(self):
     name_map = new_context(self.args.old_pmap, self.args.new_rmap)
     update_header_names(name_map)
     return log.errors()