def ingest_files(self):
    """Copy self.files into the user's ingest directory on the CRDS server."""
    stats = self._start_stats()
    destination = self.submission_info.ingest_dir
    host, path = destination.split(":")
    total_size = utils.total_size(self.files)

    ingest_info = self.get_ingested_files()
    self.scan_for_nonsubmitted_ingests(ingest_info)
    remaining_files = self.keep_existing_files(ingest_info, self.files) \
        if self.args.keep_existing_files else self.files

    for i, filename in enumerate(remaining_files):
        file_size = utils.file_size(filename)
        log.info("Copy started", repr(filename),
                 "[", i + 1, "/", len(self.files), " files ]",
                 "[", utils.human_format_number(file_size),
                 "/", utils.human_format_number(total_size), " bytes ]")
        self.copy_file(filename, path, destination)
        stats.increment("bytes", file_size)
        stats.increment("files", 1)

    stats.log_status("files", "Copy complete", len(self.files))
    stats.log_status("bytes", "Copy complete", total_size)
    log.divider(func=log.verbose)
    stats.report()
    log.divider(char="=")

def log_section(section_name, section_value, verbosity=50,
                log_function=log.verbose, divider_name=None):
    """Issue log divider bar followed by a corresponding log message."""
    log.divider(name=divider_name, verbosity=verbosity, func=log.verbose)
    log_function(section_name, section_value, verbosity=verbosity + 5)

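# Minimal usage sketch (hypothetical values, not from the source): this emits a
# named divider at the given verbosity, then logs the payload at verbosity + 5.
#
#   log_section("status_code:", 200, verbosity=50, divider_name="submit")
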
def _start_stats(self):
    """Helper method to initialize stats keeping for ingest."""
    total_bytes = utils.total_size(self.files)
    stats = utils.TimingStats(output=log.verbose)
    stats.start()
    log.divider(name="ingest files", char="=")
    log.info("Copying", len(self.files), "file(s) totalling",
             utils.human_format_number(total_bytes), "bytes")
    log.divider(func=log.verbose)
    return stats

def dump_response(self, name, response):
    """Print out verbose output related to web `response` from activity `name`."""
    log_section("headers:\n", response.headers, divider_name=name, verbosity=70)
    log_section("status_code:", response.status_code, verbosity=50)
    log_section("text:\n", response.text, verbosity=75)
    try:
        json_text = response.json()
        log_section("json:\n", json_text)
    except Exception:
        pass
    log.divider(func=log.verbose)

def main(self):
    """Main control flow for monitoring the server for submission status messages."""
    log.divider("monitoring server on " + repr(self.args.key), char="=")
    exit_flag = False
    while not exit_flag:
        for message in self._poll_status():
            handler = getattr(self, "handle_" + message.type, self.handle_unknown)
            exit_flag = handler(message)
        time.sleep(self.args.poll_delay)
    log.divider("monitoring server done", char="=")
    return exit_flag

def wipe_files(self):
    """Delete all files from the user's ingest directory on the CRDS server."""
    destination = self.submission_info.ingest_dir
    log.divider(name="wipe files", char="=")
    log.info("Wiping files at", repr(destination))
    host, path = destination.split(":")
    if destination.startswith(socket.gethostname()):
        output = pysh.out_err("rm -vf ${path}/*")
    else:
        output = pysh.out_err("ssh ${host} rm -vf ${path}/*")
    if output:
        log.verbose(output)

def main(self):
    """Main control flow of submission directory and request manifest creation."""
    log.divider("setting up", char="=")
    self.require_server_connection()
    self.finish_parameters()

    if self.args.logout:
        return self.logout()

    self.submission = self.create_submission()
    self.login()

    if self.args.wipe_existing_files:
        self.wipe_files()

    self.jpoll_key = self.jpoll_open_channel()

    if self.args.submission_kind == "batch":
        submit_future = self.batch_submit_references()
    elif self.args.submission_kind == "certify":
        submit_future = self.certify_files()
    elif self.args.submission_kind == "references":
        submit_future = self.submit_references()
    elif self.args.submission_kind == "mappings":
        submit_future = self.submit_mappings()

    if self.args.monitor_processing:
        monitor_future = self.monitor()

    if self.args.wait_for_completion:
        self.submission_complete(submit_future)

    if self.args.monitor_processing:
        self.monitor_complete(monitor_future)

    log.standard_status()
    return log.errors()

def trace_compare(self, other, show_equal=False):
    """Recursively compare object `self` to `other`, printing differences
    and optionally equal members.
    """
    log.divider(repr(self) + ":")
    for key, value in self.__dict__.items():
        try:
            ovalue = other.__dict__[key]
        except KeyError:
            print(key, "not present in other")
            continue
        equal = (value == ovalue)
        if show_equal or not equal:
            print(key, equal, value, ovalue)
        if hasattr(value, "_trace_compare"):
            value._trace_compare(ovalue)
    for key in other.__dict__:
        try:
            self.__dict__[key]
        except KeyError:
            print(key, "value not present in self")

def main():
    p = crds.get_cached_mapping("hst.pmap")
    s = pickle.dumps(p)
    q = pickle.loads(s)
    p._trace_compare(q)
    log.divider("p == q --> " + repr(p == q))
    log.divider("__getstate__ --> " + repr(p.__getstate__() == q.__getstate__()))
    log.divider("rmap __getstate__ --> " + repr(
        p.get_imap("acs").get_rmap("biasfile").__getstate__() ==
        q.get_imap("acs").get_rmap("biasfile").__getstate__()))

def list_required_parkeys(self):
    """Print out the parkeys required for matching using the specified contexts."""
    for name in self.contexts:
        mapping = crds.get_cached_mapping(name)
        log.divider(name="Parkeys required for " + repr(mapping.basename), func=log.write)
        _print_dict("", mapping.get_required_parkeys())

def cleanup(old_state):
    """Strictly speaking test cleanup is more than restoring CRDS state."""
    os.chdir(old_state.pop("OLD_CWD"))
    config.set_crds_state(old_state)
    utils.clear_function_caches()

# ==============================================================================

def run_and_profile(name, case, globs={}, locs={}):
    """Using `name` for a banner and divider, execute code string `case` in the
    global namespace, both evaled printing result and under the profiler.
    """
    utils.clear_function_caches()
    log.divider()
    log.divider(name + " example")
    log.divider()
    print(eval(case, globs, locs))

    utils.clear_function_caches()
    log.divider()
    log.divider(name + " profile")
    log.divider()
    cProfile.run(case, "profile.stats")

    stats = pstats.Stats('profile.stats')
    stats.strip_dirs()
    stats.sort_stats('cumulative')
    stats.print_stats(100)
    os.remove('profile.stats')
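
# Hypothetical usage sketch (names and expression are illustrative assumptions,
# not taken from the source).  Assumes this runs in the script's main module so
# that both eval() and cProfile.run() can resolve `crds` in their namespaces:
#
#   import crds
#   run_and_profile("pmap load", "crds.get_cached_mapping('hst.pmap')",
#                   globs=globals())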