def _get_pbsnodes_values(orig_file, out_file, write_method):
    """Extract per-node values from a pbsnodes output file.

    Parses the blocks produced by ``_read_all_blocks`` and returns a list of
    dicts, one per node, with keys: 'domainname', 'state', 'np', optionally
    'gpus', and optionally 'core_job_map' (a list of {'job', 'core'} dicts).

    out_file / write_method are accepted for interface compatibility with the
    sibling parsers but are not used here.
    """
    check_empty_file(orig_file)
    raw_blocks = _read_all_blocks(orig_file)
    all_pbs_values = []
    anonymize = anonymize_func()
    for block in raw_blocks:
        pbs_values = dict()
        pbs_values['domainname'] = block['domainname'] \
            if not options.ANONYMIZE else anonymize(block['domainname'], 'wns')
        nextchar = block['state'][0]
        # A 'free' node is displayed as '-'; any other state keeps its first letter.
        # (Ternary replaces the old `and/or` hack, which is fragile with falsy values.)
        state = "-" if nextchar == 'f' else nextchar
        pbs_values['state'] = state
        try:
            pbs_values['np'] = block['np']
        except KeyError:
            pbs_values['np'] = block['pcpus']  # handle torque cases  # todo : to check
        # Bug fix: default to 0 so a missing 'gpus' key does not raise
        # TypeError (None > 0) on Python 3.
        if block.get('gpus', 0) > 0:  # this should be rare.
            pbs_values['gpus'] = block['gpus']
        try:  # this should turn up more often, hence the try/except.
            _ = block['jobs']
        except KeyError:
            pass
        else:
            pbs_values['core_job_map'] = []
            jobs = block['jobs'].split(',')
            for job, core in _get_jobs_cores(jobs):
                _d = dict()
                _d['job'] = job
                _d['core'] = core
                pbs_values['core_job_map'].append(_d)
        finally:
            # Append the node whether or not it carried any jobs.
            all_pbs_values.append(pbs_values)
    return all_pbs_values
def _get_pbsnodes_values(orig_file, out_file, write_method):
    """Extract per-node values from a pbsnodes output file.

    Returns a list of dicts (one per node) carrying 'domainname', 'state',
    'np', optionally 'gpus', and optionally 'core_job_map'.

    NOTE(review): this is a duplicate definition of the function above; the
    later one wins at import time. Consider deleting one of the two.
    """
    check_empty_file(orig_file)
    raw_blocks = _read_all_blocks(orig_file)
    all_pbs_values = []
    anonymize = anonymize_func()
    for block in raw_blocks:
        pbs_values = dict()
        pbs_values['domainname'] = block['domainname'] \
            if not options.ANONYMIZE else anonymize(block['domainname'], 'wns')
        nextchar = block['state'][0]
        # 'free' nodes display as '-'; otherwise keep the state's first letter.
        state = "-" if nextchar == 'f' else nextchar
        pbs_values['state'] = state
        try:
            pbs_values['np'] = block['np']
        except KeyError:
            pbs_values['np'] = block['pcpus']  # handle torque cases  # todo : to check
        # Bug fix: default to 0 so a missing 'gpus' key does not raise
        # TypeError (None > 0) on Python 3.
        if block.get('gpus', 0) > 0:  # this should be rare.
            pbs_values['gpus'] = block['gpus']
        try:  # this should turn up more often, hence the try/except.
            _ = block['jobs']
        except KeyError:
            pass
        else:
            pbs_values['core_job_map'] = []
            jobs = block['jobs'].split(',')
            for job, core in _get_jobs_cores(jobs):
                _d = dict()
                _d['job'] = job
                _d['core'] = core
                pbs_values['core_job_map'].append(_d)
        finally:
            # Append the node whether or not it carried any jobs.
            all_pbs_values.append(pbs_values)
    return all_pbs_values
def _get_statq_from_xml(fn, write_method=options.write_method):
    """Parse an SGE qstat XML file and aggregate per-queue job statistics.

    Returns a tuple ``(total_running_jobs, total_queued_jobs, qstatq_list)``
    where the totals are strings and qstatq_list is a list of dicts with keys
    'queue_name', 'state', 'run', 'queued', 'lm', plus a synthetic trailing
    'Pending' entry holding the global queued count.
    """
    import sys

    logging.debug("Parsing tree of %s" % fn)
    check_empty_file(fn)
    anonymize = anonymize_func()
    with open(fn, mode='rb') as fin:
        try:
            tree = etree.parse(fin)
        except etree.ParseError:
            # Bug fix: previously this branch only logged "Exiting..." and fell
            # through, leaving `tree` unbound and raising NameError below.
            logging.critical("Something happened during the parsing of the XML file. Exiting...")
            sys.exit(1)
        except Exception:
            # Narrowed from a bare `except:` (which also swallowed SystemExit).
            logging.debug("XML file state %s" % fin)
            logging.debug("thinking...")
            sys.exit(1)
    root = tree.getroot()
    qstatq_list = []
    # for queue_elem in root.iter('Queue-List'):  # python 2.7-only
    for queue_elem in root.findall('queue_info/Queue-List'):
        # queue_name = queue_elem.find('./resource[@name="qname"]').text  # python 2.7-only
        queue_names = queue_elem.findall('resource')
        for _queue_name in queue_names:
            if _queue_name.attrib.get('name') == 'qname':
                queue_name = _queue_name.text if not options.ANONYMIZE else anonymize(_queue_name.text, 'qs')
                break
        else:
            raise ValueError("No such resource")

        FOUND = False
        for exist_d in qstatq_list:
            if queue_name == exist_d['queue_name']:
                # exist_d['run'] += len(queue_elem.findall('./job_list[@state="running"]'))  # python 2.7 only
                jobs = queue_elem.findall('job_list')
                run_count = 0
                for _run in jobs:
                    if _run.attrib.get('state') == 'running':
                        run_count += 1
                exist_d['run'] += run_count
                FOUND = True
                break
        if FOUND:
            continue

        d = dict()
        d['queue_name'] = queue_name
        try:
            d['state'] = queue_elem.find('./state').text
        except AttributeError:
            # No <state> child: mark as unknown.
            d['state'] = '?'
        # (Removed the redundant trailing `except: raise` -- re-raising
        # everything else is already the default behavior.)
        # d['run'] = len(queue_elem.findall('./job_list[@state="running"]'))  # python 2.7 only
        job_lists = queue_elem.findall('job_list')
        run_count = 0
        for _run in job_lists:
            if _run.attrib.get('state') == 'running':
                run_count += 1
        d['run'] = run_count
        d['lm'] = 0
        d['queued'] = 0
        qstatq_list.append(d)

    total_running_jobs = str(sum([d['run'] for d in qstatq_list]))
    logging.info('Total running jobs found: %s' % total_running_jobs)
    for d in qstatq_list:
        d['run'] = str(d['run'])
        d['queued'] = str(d['queued'])

    # total_queued_jobs = str(len(root.findall('.//job_list[@state="pending"]')))  # python 2.7 only
    total_queued_jobs_elems = root.findall('job_info/job_list')
    pending_count = 0
    for job in total_queued_jobs_elems:
        if job.attrib.get('state') == 'pending':
            pending_count += 1
    total_queued_jobs = str(pending_count)
    logging.info('Total queued jobs found: %s' % total_queued_jobs)
    qstatq_list.append({'run': '0', 'queued': total_queued_jobs, 'queue_name': 'Pending', 'state': 'Q', 'lm': '0'})
    logging.debug('qstatq_list contains %s elements' % len(qstatq_list))
    # TODO: check validity. 'state' shouldnt just be 'Q'!
    logging.debug("Closing %s" % fn)
    return total_running_jobs, total_queued_jobs, qstatq_list
def get_queues_info(self):
    """Parse the SGE stat XML tree and aggregate per-queue job statistics.

    Returns a tuple ``(total_running_jobs, total_queued_jobs, qstatq_list)``:
    total_running_jobs and total_queued_jobs are ints, and qstatq_list is a
    list of dicts with keys 'queue_name', 'state', 'run', 'queued', 'lm',
    plus a synthetic trailing 'Pending' entry holding the global queued count.
    """
    logging.debug("Parsing tree of %s" % self.sge_file_stat)
    check_empty_file(self.sge_file_stat)
    anonymize = self.sge_stat_maker.anonymize_func()
    tree = self._get_xml_tree(self.sge_file_stat)
    root = tree.getroot()
    qstatq_list = []
    for queue_elem in root.findall('queue_info/Queue-List'):
        queue_names = queue_elem.findall('resource')
        for _queue_name in queue_names:
            if _queue_name.attrib.get('name') == 'qname':
                queue_name = _queue_name.text if not options.ANONYMIZE else anonymize(_queue_name.text, 'qs')
                break
        else:
            raise ValueError("No such resource")

        FOUND = False
        for exist_d in qstatq_list:
            if queue_name == exist_d['queue_name']:
                # Queue already seen: just accumulate its running-job count.
                jobs = queue_elem.findall('job_list')
                run_count = 0
                for _run in jobs:
                    if _run.attrib.get('state') == 'running':
                        run_count += 1
                exist_d['run'] += run_count
                FOUND = True
                break
        if FOUND:
            continue

        d = dict()
        d['queue_name'] = queue_name
        try:
            d['state'] = queue_elem.find('./state').text
        except AttributeError:
            # No <state> child: mark as unknown.
            d['state'] = '?'
        except:
            raise
        job_lists = queue_elem.findall('job_list')
        run_count = 0
        for _run in job_lists:
            if _run.attrib.get('state') == 'running':
                run_count += 1
        d['run'] = run_count
        d['lm'] = 0
        d['queued'] = 0
        qstatq_list.append(d)

    total_running_jobs = sum([d['run'] for d in qstatq_list])
    logging.info('Total running jobs found: %s' % total_running_jobs)
    for d in qstatq_list:
        d['run'] = str(d['run'])
        d['queued'] = str(d['queued'])

    total_queued_jobs_elems = root.findall('job_info/job_list')
    pending_count = 0
    for job in total_queued_jobs_elems:
        if job.attrib.get('state') == 'pending':
            pending_count += 1
    total_queued_jobs = str(pending_count)
    logging.info('Total queued jobs found: %s' % total_queued_jobs)
    qstatq_list.append({'run': '0', 'queued': total_queued_jobs, 'queue_name': 'Pending', 'state': 'Q', 'lm': '0'})
    logging.debug('qstatq_list contains %s elements' % len(qstatq_list))
    # TODO: check validity. 'state' shouldnt just be 'Q'!
    logging.debug("Closing %s" % self.sge_file_stat)
    # Bug fix: was `int(eval(str(total_queued_jobs)))` -- total_queued_jobs is
    # str(pending_count), so a plain int() conversion is equivalent and avoids
    # an unnecessary (and unsafe-looking) eval.
    return total_running_jobs, int(total_queued_jobs), qstatq_list