def main(self, argv):
    args = self._parse_args(argv)
    profile_tools = getattr(args, 'profile_tools', None)
    trace_tools = getattr(args, 'trace_tools', None)
    # Positional arguments may be trial numbers or paths to existing data files.
    data_files = []
    trial_numbers = []
    for num in getattr(args, 'trial_numbers', []) + getattr(args, 'data_files', []):
        if os.path.exists(num):
            data_files.append(num)
        else:
            try:
                trial_numbers.append(int(num))
            except ValueError:
                self.parser.error("Invalid trial number: %s" % num)
    tau = TauInstallation.get_minimal()
    dataset = {}
    if not (data_files or trial_numbers):
        # No arguments: show data from the first trial of the selected experiment.
        expr = Project.selected().experiment()
        for fmt, path in expr.trials()[0].get_data_files().iteritems():
            dataset[fmt] = [path]
    elif trial_numbers:
        # Collect data files from the requested trials, grouped by format.
        expr = Project.selected().experiment()
        for trial in expr.trials(trial_numbers):
            for fmt, path in trial.get_data_files().iteritems():
                dataset.setdefault(fmt, []).append(path)
    # Explicit data file arguments: detect the format of each file.
    for path in data_files:
        fmt = tau.get_data_format(path)
        dataset.setdefault(fmt, []).append(path)
    return tau.show_data_files(dataset, profile_tools, trace_tools)
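# A hedged illustration (not in the original source) of the ``dataset`` mapping that
# ``main`` builds above: keys are data format names as reported by the trials or by
# ``TauInstallation.get_data_format``, values are lists of file paths.  The experiment
# name, trial number, and paths shown here are hypothetical.
#
#     dataset = {'tau': ['.../myexp/42/profile.0.0.0'],
#                'slog2': ['.../myexp/42/tau.slog2']}
#     tau.show_data_files(dataset, profile_tools, trace_tools)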
def export(self, dest):
    """Export experiment trial data.

    Args:
        dest (str): Path to directory to contain exported data.

    Raises:
        ConfigurationError: This trial has no data.
    """
    expr = self.populate('experiment')
    if self.get('data_size', 0) <= 0:
        raise ConfigurationError("Trial %s of experiment '%s' has no data" %
                                 (self['number'], expr['name']))
    data = self.get_data_files()
    stem = '%s.trial%d' % (expr['name'], self['number'])
    for fmt, path in data.iteritems():
        if fmt == 'tau':
            export_file = os.path.join(dest, stem + '.ppk')
            tau = TauInstallation.get_minimal()
            tau.create_ppk_file(export_file, path)
        elif fmt == 'merged':
            export_file = os.path.join(dest, stem + '.xml.gz')
            util.create_archive('gz', export_file, [path])
        elif fmt == 'cubex':
            export_file = os.path.join(dest, stem + '.cubex')
            LOGGER.info("Writing '%s'...", export_file)
            util.copy_file(path, export_file)
        elif fmt == 'slog2':
            export_file = os.path.join(dest, stem + '.slog2')
            LOGGER.info("Writing '%s'...", export_file)
            util.copy_file(path, export_file)
        elif fmt == 'otf2':
            export_file = os.path.join(dest, stem + '.tgz')
            expr_dir, trial_dir = os.path.split(os.path.dirname(path))
            items = [os.path.join(trial_dir, item)
                     for item in ('traces', 'traces.def', 'traces.otf2')]
            util.create_archive('tgz', export_file, items, expr_dir)
        elif fmt != 'none':
            raise InternalError("Unhandled data file format '%s'" % fmt)
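# Hedged usage sketch (not part of the original source): exporting a trial's data to a
# directory.  ``trial`` stands for a populated trial record; 'myexp' and trial number 0
# are hypothetical, and the output names follow the ``stem`` pattern in ``export`` above.
#
#     trial.export('/tmp/exports')
#     # 'tau' profiles  -> /tmp/exports/myexp.trial0.ppk
#     # 'otf2' traces   -> /tmp/exports/myexp.trial0.tgz
#     # 'cubex'/'slog2' -> copied as /tmp/exports/myexp.trial0.cubex / .slog2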
def _postprocess_slog2(self):
    """Convert merged TAU trace files to SLOG2 format and remove the intermediate files."""
    slog2 = os.path.join(self.prefix, 'tau.slog2')
    if os.path.exists(slog2):
        return
    tau = TauInstallation.get_minimal()
    merged_trc = os.path.join(self.prefix, 'tau.trc')
    merged_edf = os.path.join(self.prefix, 'tau.edf')
    if not os.path.exists(merged_trc) or not os.path.exists(merged_edf):
        tau.merge_tau_trace_files(self.prefix)
    tau.tau_trace_to_slog2(merged_trc, merged_edf, slog2)
    trc_files = glob.glob(os.path.join(self.prefix, '*.trc'))
    edf_files = glob.glob(os.path.join(self.prefix, '*.edf'))
    count_trc_edf = len(trc_files) + len(edf_files)
    LOGGER.info('Cleaning up TAU trace files...')
    with ProgressIndicator("", total_size=count_trc_edf) as progress_bar:
        count = 0
        for path in trc_files + edf_files:
            os.remove(path)
            count += 1
            progress_bar.update(count)
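# Hedged sketch (not in the original source) of the pipeline ``_postprocess_slog2``
# implements: per-rank TAU trace files under ``self.prefix`` are merged, converted to
# SLOG2, and then all .trc/.edf files are removed.  The per-rank file names below are
# illustrative.
#
#     <prefix>/tautrace.*.trc, <prefix>/events.*.edf
#         --merge_tau_trace_files()--> <prefix>/tau.trc, <prefix>/tau.edf
#         --tau_trace_to_slog2()-----> <prefix>/tau.slog2   (then *.trc/*.edf deleted)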