class ProgressBar(object):
    """Thin convenience wrapper around ``IncrementalBar`` from the
    ``progress`` package.

    The bar's suffix is configured to show completion percentage to one
    decimal place together with an ETA in seconds.
    """

    def __init__(self, initialMessage, numOperations):
        """Build the underlying bar and draw it immediately.

        :param initialMessage: text shown in front of the bar; it can be
            replaced later via :meth:`updateMessage`.
        :param numOperations: total number of steps the bar represents.
        """
        self.bar = IncrementalBar(
            message=initialMessage,
            max=numOperations,
            suffix='%(percent).1f%% - %(eta)ds',
        )
        self.bar.update()

    def updateProgress(self):
        """Advance the bar by one step; call after each operation completes.

        :return: None
        """
        self.bar.next()

    def updateMessage(self, message):
        """Replace the text displayed in front of the bar and redraw it.

        :message: the new message to display
        :return: None
        """
        self.bar.message = message
        self.bar.update()

    def updateStatus(self):
        """Redraw the bar without advancing it.

        :return: None
        """
        self.bar.update()

    def finish(self):
        """Complete the bar to 100% and release the display line.

        :return: None
        """
        self.bar.finish()
def find_solutions(self, graph_setting_groups):
    """Propagate quantum numbers through every interaction-settings group
    and collect all valid solutions.

    Groups are processed in order of decreasing strength.  Propagation is
    fanned out over a multiprocessing pool when more than one thread is
    configured; progress is shown with an ``IncrementalBar``.

    :param graph_setting_groups: mapping of strength -> list of graph /
        interaction-setting pairs to propagate.
    :return: tuple ``(final_solutions, violated_laws)``
    """
    results = {}
    # check for solutions for a specific set of interaction settings
    logging.info("Number of interaction settings groups being processed: "
                 + str(len(graph_setting_groups)))
    for strength, graph_setting_group in sorted(graph_setting_groups.items(),
                                                reverse=True):
        logging.info("processing interaction settings group with "
                     "strength " + str(strength))
        logging.info(str(len(graph_setting_group)) + " entries in this group")
        logging.info("running with " + str(self.number_of_threads)
                     + " threads...")
        bar = IncrementalBar('Propagating quantum numbers...',
                             max=len(graph_setting_group))
        bar.update()
        group_results = []
        if self.number_of_threads > 1:
            # fan the group out over a worker pool (chunk size 1)
            with Pool(self.number_of_threads) as pool:
                for outcome in pool.imap_unordered(
                        self.propagate_quantum_numbers,
                        graph_setting_group, 1):
                    group_results.append(outcome)
                    bar.next()
        else:
            for graph_setting_pair in graph_setting_group:
                group_results.append(
                    self.propagate_quantum_numbers(graph_setting_pair))
                bar.next()
        bar.finish()
        logging.info('Finished!')
        results.setdefault(strength, []).extend(group_results)
    for strength_key, strength_entries in results.items():
        logging.info("number of solutions for strength ("
                     + str(strength_key) + ") after qn propagation: "
                     + str(sum(len(entry[0]) for entry in strength_entries)))
    # remove duplicate solutions, which only differ in the interaction qn S
    results = remove_duplicate_solutions(results, self.filter_remove_qns,
                                         self.filter_ignore_qns)
    solutions = []
    node_non_satisfied_rules = []
    for strength_entries in results.values():
        for tempsolutions, non_satisfied_laws in strength_entries:
            solutions.extend(tempsolutions)
            node_non_satisfied_rules.append(non_satisfied_laws)
    logging.info("total number of found solutions: " + str(len(solutions)))
    violated_laws = []
    if not solutions:
        # nothing survived propagation; report which rules were violated
        violated_laws = analyse_solution_failure(node_non_satisfied_rules)
        logging.info("violated rules: " + str(violated_laws))
    # finally perform combinatorics of identical external edges
    # (initial or final state edges) and prepare graphs for
    # amplitude generation
    match_external_edges(solutions)
    final_solutions = []
    for sol in solutions:
        final_solutions.extend(
            perform_external_edge_identical_particle_combinatorics(sol))
    return (final_solutions, violated_laws)
class ReportCompile(object):
    """Compile a composite test report across several Jenkins builds of a
    job/template pair.

    Build numbers are discovered via trackerbot, per-build
    ``test-report.json`` files are pulled over SSH by background worker
    threads, and the best outcome per test node is merged into a single
    composite report.

    NOTE(review): relies on module-level names not visible in this chunk
    (``composite``, ``credentials``, ``test_counts``, ``lock``,
    ``_queue_worker``, ``trackerbot``, ``reporter``, ``local``, ``Bar``,
    ``SSHClient``).  This is Python 2 code (``xrange``, ``iteritems``,
    ``len(filter(...))``).
    """

    def __init__(self, job_name, template, **kwargs):
        """Store settings, prepare the work dir, start 4 worker threads.

        :param job_name: Jenkins job name used to look up builds.
        :param template: template name used to filter trackerbot builds.

        Recognized kwargs: ``no_artifacts`` (default True), ``num_builds``,
        ``minimum_build``, ``exclude_builds`` (build numbers to skip),
        ``work_dir`` (falls back to a fresh temp dir).
        """
        self.job_name = job_name
        self.template = template
        self.no_artifacts = kwargs.get('no_artifacts', True)
        self.num_builds = int(kwargs.get('num_builds', composite['num_builds']))
        self.minimum_build = int(kwargs.get('minimum_build', composite['min_build']))
        self.exclude_builds = [int(xb) for xb in kwargs.get('exclude_builds', [])]
        try:
            self.work_dir = local(kwargs.get('work_dir', composite['work_dir']))
            self.work_dir.ensure(dir=True)
        except KeyError:
            # no configured work dir at all -> use a temp dir
            self.work_dir = local.mkdtemp()
            print('Writing composite report to {}'.format(self.work_dir.strpath))
        self._progress = None
        self._queue = Queue()
        # daemon workers consume (remote, local, build_number, tracker)
        # items via the module-level _queue_worker helper
        num_workers = 4
        for __ in xrange(num_workers):
            worker = Thread(target=_queue_worker, args=(self, ))
            worker.daemon = True
            worker.start()

    @property
    def ssh_client(self):
        """Return a brand-new SSHClient on every access."""
        c = SSHClient()
        return c

    @staticmethod
    def _best_result(*results):
        """Return the (result_id, result_value) pair with the best outcome.

        Ties are broken in favour of the highest result_id (most recent
        build), because the sorted results are scanned in reverse.
        Implicitly returns None when no value matches the ranking.
        """
        # results should be a list of (result_id, result_value) tuples
        # result ranking, best to worst
        results_ranking = ('passed', 'xfailed', 'failed', 'xpassed', 'skipped', 'error')
        # Go through all the results, returning the best outcome based on results_ranking
        for result in results_ranking:
            for result_id, result_value in reversed(sorted(results, key=lambda r: r[0])):
                if result_value == result:
                    return (result_id, result_value)

    @staticmethod
    def _streak(*results):
        """Count how many of the most recent results share the latest value.

        :return: ``{'latest_result': value, 'count': streak_length}``
        """
        sorted_results = sorted(results, key=lambda r: r[0])
        # the value of the highest numbered (and therefore more recent) build
        latest_result = sorted_results[-1][1]
        streak = 0
        for __, result_value in reversed(sorted_results):
            if result_value == latest_result:
                streak += 1
            else:
                break
        return {'latest_result': latest_result, 'count': streak}

    def _progress_update(self, item, items_done):
        """Mark *item* as done in *items_done* and redraw the shared bar.

        Lazily creates the Bar on first call; pass ``item=None`` to only
        resize/redraw (e.g. when registering new pending items).
        """
        if self._progress is None:
            self._progress = Bar()
            self._progress.message = '%(index)d/%(max)d'
            self._progress.suffix = ''
        if item:
            items_done[item] = True
        self._progress.max = len(items_done)
        # Python 2: filter() returns a list, so len() is valid here
        self._progress.index = len(filter(None, items_done.values()))
        with lock:
            try:
                self._progress.update()
            # ignore the redraw when max is still 0
            except ZeroDivisionError:
                pass

    def _progress_finish(self):
        """Finish and discard the current progress bar."""
        self._progress.finish()
        self._progress = None

    def compile(self):
        """Entry point: build and return the composite report."""
        return self.composite_report()

    def build_numbers(self):
        """Return the build numbers to pull, newest first, filtered by
        ``exclude_builds``/``minimum_build`` and capped at ``num_builds``."""
        api = trackerbot.api()
        builds = trackerbot.depaginate(api,
            api.build.get(job_name=self.job_name, template=self.template))
        build_numbers = []
        # XXX relying on trackerbot giving us the most recent builds first, should be explicit
        for build in builds.get('objects', []):
            if (build['number'] not in self.exclude_builds
                    and build['number'] >= self.minimum_build):
                build_numbers.append(build['number'])
                # stop once we have collected num_builds accepted builds
                if self.num_builds and len(build_numbers) == self.num_builds:
                    break
        if build_numbers:
            print('Pulling reports from builds {}'.format(
                ', '.join([str(n) for n in build_numbers])))
        return build_numbers

    def template_log_dirs(self):
        """Return (build_number, remote_log_dir) pairs from the
        configured ``log_dir_tpl`` template."""
        log_dir_tpl = composite['log_dir_tpl']
        log_dirs = []
        for build_number in self.build_numbers():
            log_dirs.append((build_number,
                             log_dir_tpl.format(self.job_name, build_number)))
        return log_dirs

    def test_reports(self):
        """Fetch every build's ``test-report.json`` via the worker queue
        and return ``{build_number: parsed_report}``."""
        print('Collecting test reports to determine best build nodes')
        log_dirs = self.template_log_dirs()
        reports = {}
        c = self.ssh_client
        jenkins_host = composite['jenkins_host']
        c.connect(jenkins_host, username=credentials['jenkins-result']['username'],
            password=credentials['jenkins-result']['password'],
            timeout=10,
            allow_agent=False,
            look_for_keys=False,
            gss_auth=False)
        builds_done = {}
        self._progress_update(None, builds_done)
        # enqueue one download per build; workers flip builds_done entries
        for build_number, log_dir in log_dirs:
            build_work_dir = local(self.work_dir.join(str(build_number)))
            build_work_dir.ensure(dir=True)
            _remote = local(log_dir).join('test-report.json').strpath
            _local = build_work_dir.join('test-report.json').strpath
            builds_done[build_number] = False
            self._progress_update(None, builds_done)
            self._queue.put((_remote, _local, build_number, builds_done))
        self._queue.join()
        self._progress_finish()
        for build_number, __ in log_dirs:
            build_work_dir = local(self.work_dir.join(str(build_number)))
            for path in build_work_dir.visit('*/test-report.json'):
                try:
                    report = json.load(path.open())
                    reports[build_number] = report
                # NOTE(review): bare except hides more than bad JSON
                # (e.g. IOError); narrowing to ValueError would be safer
                except:
                    # invalid json, skip this report
                    pass
        return reports

    def composite_status(self, reports=None):
        """Aggregate per-node results across builds.

        :param reports: iterable of ``(build_number, report)`` pairs, as
            passed by :meth:`composite_report`.
            NOTE(review): the ``reports=None`` fallback iterates the dict
            returned by ``test_reports()`` directly, which yields bare
            keys and would fail to unpack — always pass pairs.
        """
        jenkins_host = composite['jenkins_host']
        reports = reports or self.test_reports()
        results = {}
        # results dict structure:
        # {
        #   nodeid: {
        #     'build_results': {build_id_1: build_id_1_result, build_id_2: ...}
        #     'best_result': (best_build_id, best_build_result)
        #     'result_url': http://jenkins/path/to/build
        #     'streak': (latest_build_result, number_of_results_in_a_row)
        #   },
        #   nodeid: {
        #     ...
        #   }
        # }
        for build_number, report in reports:
            for nodeid, nodedata in report.get('tests', {}).items():
                try:
                    # Try to pull the build statuses, skip the node if we can't
                    node_results_temp = nodedata['statuses']['overall']
                    node_results = results.setdefault(nodeid, {'build_results': {}})
                    node_results['build_results'][build_number] = node_results_temp
                except KeyError:
                    continue
        for nodeid, nodedata in results.items():
            node_results = nodedata['build_results'].items()
            nodedata['best_result'] = self._best_result(*node_results)
            nodedata['result_url'] = 'https://{}/job/{}/{}/'.format(
                jenkins_host, self.job_name, nodedata['best_result'][0])
            nodedata['streak'] = self._streak(*node_results)
            # test_counts is a module-level counter keyed by outcome
            # (presumably a collections.Counter — TODO confirm)
            test_counts[nodedata['best_result'][1]] += 1
        return results

    def composite_report(self):
        """Build the composite report dict, dump it to the work dir and
        run the artifactor reports over it."""
        reports = self.test_reports()
        composite_status = self.composite_status(reports.iteritems())
        composite_report = {'test_counts': test_counts, 'tests': OrderedDict()}
        print('Collecting artifacts from best build nodes')
        # tracking dict for file pull progress
        remotes_done = {}
        self._progress_update(None, remotes_done)
        # most stable (longest streak) nodes first
        for nodeid, nodedata in sorted(composite_status.items(),
                key=lambda s: s[1]['streak']['count'], reverse=True):
            best_build_number = nodedata['best_result'][0]
            best_build_test = reports[best_build_number]['tests'][nodeid]
            composite_report['tests'][nodeid] = best_build_test
            composite_report['tests'][nodeid]['composite'] = nodedata
            reports[best_build_number]['tests'][nodeid]['files'] = []
        # wait for all the files to arrive before building the report
        self._queue.join()
        self._progress_finish()
        # NOTE(review): file handle from open('w') is never closed here
        json.dump(composite_report,
                  self.work_dir.join('composite-report.json').open('w'),
                  indent=1)
        try:
            passing_percent = (100. * (test_counts['passed'] +
                test_counts['skipped'] +
                test_counts['xfailed'])) / sum(test_counts.values())
            print('Passing percent:', passing_percent)
            # XXX: Terrible artifactor spoofing happens here.
            print('Running artifactor reports')
            r = reporter.ReporterBase()
            reports_done = {'composite': False, 'provider': False}
            self._progress_update(None, reports_done)
            r._run_report(composite_report['tests'], self.work_dir.strpath)
            self._progress_update('composite', reports_done)
            r._run_provider_report(composite_report['tests'], self.work_dir.strpath)
            self._progress_update('provider', reports_done)
            self._progress_finish()
        except ZeroDivisionError:
            # sum(test_counts.values()) was 0 -> no tests at all
            print('No tests collected from test reports (?!)')
        return composite_report

    def _translate_artifacts_path(self, artifact_path, build_number):
        """Map a remote artifact path to its (remote, local) destinations,
        sanity-checking both with asserts (warn-only on failure)."""
        preamble = composite['preamble'].format(self.job_name)
        replacement = composite['replacement'].format(self.job_name, build_number)
        artifact_remote = artifact_path.replace(preamble, replacement)
        artifact_local = self.work_dir.join(str(build_number),
                                            artifact_path[len(preamble):])
        try:
            assert artifact_remote.startswith(composite['remote_sw'])
            assert artifact_local.strpath.startswith(self.work_dir.strpath)
        except AssertionError:
            print('wat?')
            print('path', artifact_path)
            print('remote', artifact_remote)
            print('local', artifact_local.strpath)
        return artifact_remote, artifact_local.strpath
class ReportCompile(object):
    """Compile a composite test report across several Jenkins builds of a
    job/template pair.

    Build numbers are discovered via trackerbot, per-build
    ``test-report.json`` files are pulled over SSH by background worker
    threads, and the best outcome per test node is merged into a single
    composite report.

    Relies on module-level names defined elsewhere in this file
    (``composite``, ``credentials``, ``test_counts``, ``lock``,
    ``_queue_worker``, ``trackerbot``, ``reporter``, ``local``, ``Bar``,
    ``SSHClient``).  Python 2 code (``xrange``, ``iteritems``).
    """

    def __init__(self, job_name, template, **kwargs):
        """Store settings, prepare the work dir, start 4 worker threads.

        :param job_name: Jenkins job name used to look up builds.
        :param template: template name used to filter trackerbot builds.

        Recognized kwargs: ``no_artifacts`` (default True), ``num_builds``,
        ``minimum_build``, ``exclude_builds`` (build numbers to skip),
        ``work_dir`` (falls back to a fresh temp dir).
        """
        self.job_name = job_name
        self.template = template
        self.no_artifacts = kwargs.get('no_artifacts', True)
        self.num_builds = int(kwargs.get('num_builds', composite['num_builds']))
        self.minimum_build = int(kwargs.get('minimum_build', composite['min_build']))
        self.exclude_builds = [int(xb) for xb in kwargs.get('exclude_builds', [])]
        try:
            self.work_dir = local(kwargs.get('work_dir', composite['work_dir']))
            self.work_dir.ensure(dir=True)
        except KeyError:
            # no configured work dir at all -> use a temp dir
            self.work_dir = local.mkdtemp()
            print('Writing composite report to {}'.format(self.work_dir.strpath))
        self._progress = None
        self._queue = Queue()
        # daemon workers consume (remote, local, build_number, tracker)
        # items via the module-level _queue_worker helper
        num_workers = 4
        for __ in xrange(num_workers):
            worker = Thread(target=_queue_worker, args=(self,))
            worker.daemon = True
            worker.start()

    @property
    def ssh_client(self):
        """Return a brand-new SSHClient on every access."""
        c = SSHClient()
        return c

    @staticmethod
    def _best_result(*results):
        """Return the (result_id, result_value) pair with the best outcome.

        Ties are broken in favour of the highest result_id (most recent
        build), because the sorted results are scanned in reverse.
        Implicitly returns None when no value matches the ranking.
        """
        # results should be a list of (result_id, result_value) tuples
        # result ranking, best to worst
        results_ranking = ('passed', 'xfailed', 'failed', 'xpassed', 'skipped', 'error')
        # Go through all the results, returning the best outcome based on results_ranking
        for result in results_ranking:
            for result_id, result_value in reversed(sorted(results, key=lambda r: r[0])):
                if result_value == result:
                    return (result_id, result_value)

    @staticmethod
    def _streak(*results):
        """Count how many of the most recent results share the latest value.

        :return: ``{'latest_result': value, 'count': streak_length}``
        """
        sorted_results = sorted(results, key=lambda r: r[0])
        # the value of the highest numbered (and therefore more recent) build
        latest_result = sorted_results[-1][1]
        streak = 0
        for __, result_value in reversed(sorted_results):
            if result_value == latest_result:
                streak += 1
            else:
                break
        return {'latest_result': latest_result, 'count': streak}

    def _progress_update(self, item, items_done):
        """Mark *item* as done in *items_done* and redraw the shared bar.

        Lazily creates the Bar on first call; pass ``item=None`` to only
        resize/redraw (e.g. when registering new pending items).
        """
        if self._progress is None:
            self._progress = Bar()
            self._progress.message = '%(index)d/%(max)d'
            self._progress.suffix = ''
        if item:
            items_done[item] = True
        self._progress.max = len(items_done)
        # Python 2: filter() returns a list, so len() is valid here
        self._progress.index = len(filter(None, items_done.values()))
        with lock:
            try:
                self._progress.update()
            # ignore the redraw when max is still 0
            except ZeroDivisionError:
                pass

    def _progress_finish(self):
        """Finish and discard the current progress bar."""
        self._progress.finish()
        self._progress = None

    def compile(self):
        """Entry point: build and return the composite report."""
        return self.composite_report()

    def build_numbers(self):
        """Return the build numbers to pull, newest first, filtered by
        ``exclude_builds``/``minimum_build`` and capped at ``num_builds``."""
        api = trackerbot.api()
        builds = trackerbot.depaginate(
            api, api.build.get(job_name=self.job_name, template=self.template))
        build_numbers = []
        # XXX relying on trackerbot giving us the most recent builds first, should be explicit
        for build in builds.get('objects', []):
            if (build['number'] not in self.exclude_builds
                    and build['number'] >= self.minimum_build):
                build_numbers.append(build['number'])
                # stop once we have collected num_builds accepted builds
                if self.num_builds and len(build_numbers) == self.num_builds:
                    break
        if build_numbers:
            print('Pulling reports from builds {}'.format(
                ', '.join([str(n) for n in build_numbers])))
        return build_numbers

    def template_log_dirs(self):
        """Return (build_number, remote_log_dir) pairs from the
        configured ``log_dir_tpl`` template."""
        log_dir_tpl = composite['log_dir_tpl']
        log_dirs = []
        for build_number in self.build_numbers():
            log_dirs.append((build_number,
                             log_dir_tpl.format(self.job_name, build_number)))
        return log_dirs

    def test_reports(self):
        """Fetch every build's ``test-report.json`` via the worker queue
        and return ``{build_number: parsed_report}``."""
        print('Collecting test reports to determine best build nodes')
        log_dirs = self.template_log_dirs()
        reports = {}
        c = self.ssh_client
        jenkins_host = composite['jenkins_host']
        c.connect(jenkins_host,
                  username=credentials['jenkins-result']['username'],
                  password=credentials['jenkins-result']['password'],
                  timeout=10,
                  allow_agent=False,
                  look_for_keys=False,
                  gss_auth=False)
        builds_done = {}
        self._progress_update(None, builds_done)
        # enqueue one download per build; workers flip builds_done entries
        for build_number, log_dir in log_dirs:
            build_work_dir = local(self.work_dir.join(str(build_number)))
            build_work_dir.ensure(dir=True)
            _remote = local(log_dir).join('test-report.json').strpath
            _local = build_work_dir.join('test-report.json').strpath
            builds_done[build_number] = False
            self._progress_update(None, builds_done)
            self._queue.put((_remote, _local, build_number, builds_done))
        self._queue.join()
        self._progress_finish()
        for build_number, __ in log_dirs:
            build_work_dir = local(self.work_dir.join(str(build_number)))
            for path in build_work_dir.visit('*/test-report.json'):
                try:
                    report = json.load(path.open())
                    reports[build_number] = report
                # was a bare except; json.load raises ValueError on bad
                # JSON -- catch only that so real errors still surface
                except ValueError:
                    # invalid json, skip this report
                    pass
        return reports

    def composite_status(self, reports=None):
        """Aggregate per-node results across builds.

        :param reports: iterable of ``(build_number, report)`` pairs; when
            None, reports are collected via :meth:`test_reports`.
        :return: ``{nodeid: {'build_results': ..., 'best_result': ...,
            'result_url': ..., 'streak': ...}}``
        """
        jenkins_host = composite['jenkins_host']
        # bugfix: the fallback must yield (build_number, report) pairs;
        # iterating the bare dict would only yield keys
        reports = reports or self.test_reports().iteritems()
        results = {}
        # results dict structure:
        # {
        #   nodeid: {
        #     'build_results': {build_id_1: build_id_1_result, build_id_2: ...}
        #     'best_result': (best_build_id, best_build_result)
        #     'result_url': http://jenkins/path/to/build
        #     'streak': (latest_build_result, number_of_results_in_a_row)
        #   },
        #   nodeid: {
        #     ...
        #   }
        # }
        for build_number, report in reports:
            for nodeid, nodedata in report.get('tests', {}).items():
                try:
                    # Try to pull the build statuses, skip the node if we can't
                    node_results_temp = nodedata['statuses']['overall']
                    node_results = results.setdefault(nodeid, {'build_results': {}})
                    node_results['build_results'][build_number] = node_results_temp
                except KeyError:
                    continue
        for nodeid, nodedata in results.items():
            node_results = nodedata['build_results'].items()
            nodedata['best_result'] = self._best_result(*node_results)
            nodedata['result_url'] = 'https://{}/job/{}/{}/'.format(
                jenkins_host, self.job_name, nodedata['best_result'][0])
            nodedata['streak'] = self._streak(*node_results)
            # test_counts is the module-level outcome counter
            test_counts[nodedata['best_result'][1]] += 1
        return results

    def composite_report(self):
        """Build the composite report dict, dump it to the work dir and
        run the artifactor reports over it."""
        reports = self.test_reports()
        composite_status = self.composite_status(reports.iteritems())
        composite_report = {'test_counts': test_counts, 'tests': OrderedDict()}
        print('Collecting artifacts from best build nodes')
        # tracking dict for file pull progress
        remotes_done = {}
        self._progress_update(None, remotes_done)
        # most stable (longest streak) nodes first
        for nodeid, nodedata in sorted(composite_status.items(),
                                       key=lambda s: s[1]['streak']['count'],
                                       reverse=True):
            best_build_number = nodedata['best_result'][0]
            best_build_test = reports[best_build_number]['tests'][nodeid]
            composite_report['tests'][nodeid] = best_build_test
            composite_report['tests'][nodeid]['composite'] = nodedata
            reports[best_build_number]['tests'][nodeid]['files'] = []
        # wait for all the files to arrive before building the report
        self._queue.join()
        self._progress_finish()
        # fix: close the report file instead of leaking the handle
        with self.work_dir.join('composite-report.json').open('w') as report_fp:
            json.dump(composite_report, report_fp, indent=1)
        try:
            passing_percent = (100. * (test_counts['passed'] +
                                       test_counts['skipped'] +
                                       test_counts['xfailed'])
                               ) / sum(test_counts.values())
            print('Passing percent:', passing_percent)
            # XXX: Terrible artifactor spoofing happens here.
            print('Running artifactor reports')
            r = reporter.ReporterBase()
            reports_done = {'composite': False, 'provider': False}
            self._progress_update(None, reports_done)
            r._run_report(composite_report['tests'], self.work_dir.strpath)
            self._progress_update('composite', reports_done)
            r._run_provider_report(composite_report['tests'], self.work_dir.strpath)
            self._progress_update('provider', reports_done)
            self._progress_finish()
        except ZeroDivisionError:
            # sum(test_counts.values()) was 0 -> no tests at all
            print('No tests collected from test reports (?!)')
        return composite_report

    def _translate_artifacts_path(self, artifact_path, build_number):
        """Map a remote artifact path to its (remote, local) destinations,
        sanity-checking both with asserts (warn-only on failure)."""
        preamble = composite['preamble'].format(self.job_name)
        replacement = composite['replacement'].format(self.job_name, build_number)
        artifact_remote = artifact_path.replace(preamble, replacement)
        artifact_local = self.work_dir.join(str(build_number),
                                            artifact_path[len(preamble):])
        try:
            assert artifact_remote.startswith(composite['remote_sw'])
            assert artifact_local.strpath.startswith(self.work_dir.strpath)
        except AssertionError:
            print('wat?')
            print('path', artifact_path)
            print('remote', artifact_remote)
            print('local', artifact_local.strpath)
        return artifact_remote, artifact_local.strpath
# NOTE(review): this fragment is truncated in the visible source -- the
# outer ``try`` and the innermost ``try`` are never closed here, so the
# indentation below is reconstructed and the handlers live elsewhere.
try:
    # per-epoch statistics accumulators
    averages = []
    variances = []
    devations = []  # NOTE(review): likely a typo for 'deviations'
    # load the global configuration and the card list it declares
    with open(__CONF_FILE_PATH__) as conf_file:
        global_conf = json.load(conf_file)
    cards = global_conf['cards']
    num_of_cards = len(cards)
    judge = Judge(cards)
    bar = IncrementalBar('Spamming the server...', max=__MAX_EPOCH__)
    bar.update()
    for i in range(__MAX_EPOCH__):
        responses = []
        errors = []
        # rewrite the config file with an increasing epoch count so each
        # pass of the loop exercises the server with a larger workload
        tmp_conf = copy.deepcopy(global_conf)
        tmp_conf['epochs'] = i+1
        tmp_conf_json = json.dumps(tmp_conf)
        conf_file = open(__CONF_FILE_PATH__, "w")
        conf_file.write(tmp_conf_json)
        conf_file.close()
        # NOTE(review): the inner loop reuses (shadows) ``i``
        for i in range(__REQUEST_NUM__):
            try:
                r = requests.get(url_factory(_A_, _B_))
def find_solutions(self, graph_setting_groups):
    """Propagate quantum numbers for every interaction-settings group and
    collect all valid solutions.

    Groups are processed in order of decreasing strength; propagation runs
    in a multiprocessing Pool when ``self.number_of_threads > 1``.

    :param graph_setting_groups: mapping of strength -> list of graph /
        interaction-setting pairs to propagate.
    :return: tuple ``(final_solutions, violated_laws)``
    """
    results = {}
    # check for solutions for a specific set of interaction settings
    logging.info(
        "Number of interaction settings groups being processed: "
        + str(len(graph_setting_groups))
    )
    # strongest interaction settings first
    for strength, graph_setting_group in sorted(
        graph_setting_groups.items(), reverse=True
    ):
        logging.info(
            "processing interaction settings group with "
            "strength " + str(strength)
        )
        logging.info(
            str(len(graph_setting_group)) + " entries in this group"
        )
        logging.info(
            "running with " + str(self.number_of_threads) + " threads..."
        )
        temp_results = []
        bar = IncrementalBar(
            "Propagating quantum numbers...", max=len(graph_setting_group)
        )
        bar.update()
        if self.number_of_threads > 1:
            # fan the group out over a worker pool (chunk size 1)
            with Pool(self.number_of_threads) as p:
                for result in p.imap_unordered(
                    self.propagate_quantum_numbers, graph_setting_group, 1
                ):
                    temp_results.append(result)
                    bar.next()
        else:
            for graph_setting_pair in graph_setting_group:
                temp_results.append(
                    self.propagate_quantum_numbers(graph_setting_pair)
                )
                bar.next()
        bar.finish()
        logging.info("Finished!")
        if strength not in results:
            results[strength] = []
        results[strength].extend(temp_results)
    for k, v in results.items():
        logging.info(
            "number of solutions for strength ("
            + str(k)
            + ") after qn propagation: "
            + str(sum([len(x[0]) for x in v]))
        )
    # remove duplicate solutions, which only differ in the interaction qn S
    results = remove_duplicate_solutions(
        results, self.filter_remove_qns, self.filter_ignore_qns
    )
    node_non_satisfied_rules = []
    solutions = []
    # flatten the per-strength (solutions, violated-rules) pairs
    for result in results.values():
        for (tempsolutions, non_satisfied_laws) in result:
            solutions.extend(tempsolutions)
            node_non_satisfied_rules.append(non_satisfied_laws)
    logging.info("total number of found solutions: " + str(len(solutions)))
    violated_laws = []
    # nothing survived propagation -> report which rules were violated
    if len(solutions) == 0:
        violated_laws = analyse_solution_failure(node_non_satisfied_rules)
        logging.info("violated rules: " + str(violated_laws))
    # finally perform combinatorics of identical external edges
    # (initial or final state edges) and prepare graphs for
    # amplitude generation
    match_external_edges(solutions)
    final_solutions = []
    for sol in solutions:
        final_solutions.extend(
            perform_external_edge_identical_particle_combinatorics(sol)
        )
    return (final_solutions, violated_laws)