def sns(s_info, opt_dict):
    """Render the current SGE node table as text.

    Refreshes *s_info*, builds its lookup tables, formats every node
    element into a form list and either returns the rendered text
    (interactive mode) or prints it to stdout.
    """
    s_info.update()
    ret_list = [time.ctime()]
    s_info.build_luts()
    node_list = sge_tools.build_node_list(s_info, opt_dict)
    # columns rendered flush-left; everything else is right-aligned
    left_justified = {
        "host", "queue", "queues", "node", "seqno", "state", "type",
        "complex", "pe_list", "userlists", "projects", "jobs",
    }
    # abbreviated column headers
    short_dict = {
        # one queue per line
        # "slot_info": "si",
        # for merged info
        "slots_used": "su",
        "slots_reserved": "sr",
        "slots_total": "st",
    }
    out_list = logging_tools.NewFormList()
    for cur_node in node_list:
        row = []
        for cur_el in cur_node:
            row.append(
                logging_tools.form_entry(
                    cur_el.text,
                    header=short_dict.get(cur_el.tag, cur_el.tag),
                    left=cur_el.tag in left_justified,
                )
            )
        out_list.append(row)
    if out_list:
        ret_list.append(str(out_list))
    rendered = "\n".join(ret_list)
    if opt_dict.interactive:
        return rendered
    else:
        print(rendered)
def _update_nodes(self):
    """Rebuild node and running-job lists, refresh slot info and accounting.

    Logs the wall-clock duration of the whole update pass.
    """
    start_time = time.time()
    self._update()
    node_result = sge_tools.build_node_list(self.__sge_info, self.__node_options)
    run_result = sge_tools.build_running_list(self.__sge_info, self.__run_options)
    self.generate_slotinfo(node_result, run_result)
    # rms_accounting_run.objects.exclude(Q(aggregation_level=RMSAggregationLevelEnum.none.value.short)).delete()
    self.aggregate_accounting()
    self.log(
        "update() call took {}".format(
            logging_tools.get_diff_time_str(time.time() - start_time)
        )
    )
def post(self, request):
    """Return the full RMS overview for the frontend as a JSON response.

    Collects in one response:
      * run / wait job tables and the node table (via ``sge_tools``)
      * per-device weathermap values (via ``mc_helper``)
      * cached job file contents (tails, newest line first)
      * per-core pinning info of MASTER jobs
      * global scheduler waiting messages
    """
    from initat.tools import sge_tools
    _post = request.POST
    my_sge_info = get_sge_info()
    my_sge_info.update()
    _salt_addons(request)
    rms_info = _fetch_rms_info(request)
    # print etree.tostring(run_job_list, pretty_print=True)
    node_list = sge_tools.build_node_list(my_sge_info, get_node_options(request))
    # let registered addons post-process the node list
    if RMS_ADDONS:
        for change_obj in RMS_ADDONS:
            change_obj.modify_nodes(my_sge_info, node_list)
    # load values
    # get name of all hosts
    _host_names = node_list.xpath(".//node/host/text()")
    # resolve to full host names / dev_pks / uuids
    _dev_dict = {
        _name: {
            "uuid": _uuid,
            "values": {},
            # core id -> job list
            "pinning": {},
            "idx": _idx,
        } for _name, _uuid, _idx in device.objects.filter(
            Q(name__in=_host_names)).values_list("name", "uuid", "idx")
    }
    # reverse lut (idx -> name)
    _rev_lut = {_value["idx"]: _key for _key, _value in _dev_dict.items()}
    # fetch monitoring values for all resolved devices in one call
    mv_dict = mc_helper.get_weathermap_data(
        [_struct["uuid"] for _struct in _dev_dict.values()])
    for _name, _struct in _dev_dict.items():
        _struct["values"] = {
            _entry["key"]: _entry["value"]
            for _entry in mv_dict.get(_struct["uuid"], [])
        }
    fc_dict = {}
    cur_time = time.time()
    # job_ids = my_sge_info.get_tree().xpath(".//job_list[master/text() = \"MASTER\"]/@full_id", smart_strings=False)
    for file_el in my_sge_info.get_tree().xpath(
            ".//job_list[master/text() = \"MASTER\"]", smart_strings=False):
        file_contents = file_el.findall(".//file_content")
        if len(file_contents):
            cur_fcd = []
            for cur_fc in file_contents:
                file_name = cur_fc.attrib["name"]
                content = cache.get(cur_fc.attrib["cache_uuid"])
                if content is not None:
                    # reverse line order so the newest output comes first
                    lines = content.replace(r"\r\n", r"\n").split("\n")
                    content = "\n".join(reversed(lines))
                    cur_fcd.append({
                        "name": file_name,
                        "content": content,
                        "size": len(content),
                        "last_update": int(cur_fc.attrib.get("last_update", cur_time)),
                        # display at most 10 lines
                        "disp_len": min(10, len(lines) + 1),
                    })
            # most recently updated files first
            fc_dict[file_el.attrib["full_id"]] = list(
                reversed(sorted(cur_fcd, key=lambda x: x["last_update"])))
    for job_el in my_sge_info.get_tree().xpath(
            ".//job_list[master/text() = \"MASTER\"]", smart_strings=False):
        job_id = job_el.attrib["full_id"]
        pinning_el = job_el.find(".//pinning_info")
        if pinning_el is not None and pinning_el.text:
            # device_id -> process_id -> core_id
            _pd = json.loads(pinning_el.text)
            for _node_idx, _pin_dict in _pd.items():
                if int(_node_idx) in _rev_lut:
                    _dn = _rev_lut[int(_node_idx)]
                    for _proc_id, _core_id in _pin_dict.items():
                        _dev_dict[_dn]["pinning"].setdefault(
                            _core_id, []).append(job_id)
    # NOTE(review): other lookups use my_sge_info.get_tree(); presumably
    # .tree is the same object -- confirm against sge_tools
    _gsi = my_sge_info.tree.find(".//global_waiting_info")
    if _gsi is not None:
        _g_msgs = [{
            "value": _line
        } for _line in self.optimize_list([
            self.node_to_value(el)["value"] for el in _gsi.findall(".//message")
        ])]
    else:
        _g_msgs = []
    # import pprint
    # pprint.pprint(self.sort_list(rms_info.wait_job_list))
    json_resp = {
        "run_table": self.sort_list(rms_info.run_job_list),
        "wait_table": self.sort_list(rms_info.wait_job_list),
        "node_table": self.sort_list(node_list),
        "sched_conf": sge_tools.build_scheduler_info(my_sge_info),
        "files": fc_dict,
        "fstree": sge_tools.build_fstree_info(my_sge_info),
        "node_values": _dev_dict,
        "global_waiting_info": _g_msgs,
    }
    return HttpResponse(json.dumps(json_resp), content_type="application/json")
def post(self, request):
    """Return the RMS overview (legacy memcache variant) as JSON.

    Collects run / wait / node tables via ``sge_tools``, per-device
    load and memory values from memcache, cached job file tails and
    MASTER-job pinning info, and returns everything as one JSON blob.

    Fixed: ``dict.iteritems()`` and ``sorted(..., cmp=...)`` are
    Python 2 only and would raise under Python 3; ported to
    ``.items()`` / ``key=`` (same ordering), matching the other
    handlers in this file. Regex pattern made a raw string.
    """
    _post = request.POST
    # NOTE(review): my_sge_info is not defined locally here -- presumably a
    # module-level global; verify against the rest of the module
    my_sge_info.update()
    _salt_addons(request)
    rms_info = _fetch_rms_info(request)
    # print etree.tostring(run_job_list, pretty_print=True)
    node_list = sge_tools.build_node_list(my_sge_info, get_node_options(request))
    # let registered addons post-process the node list
    if RMS_ADDONS:
        for change_obj in RMS_ADDONS:
            change_obj.modify_nodes(my_sge_info, node_list)
    # load values
    # get name of all hosts
    _host_names = node_list.xpath(".//node/host/text()")
    # memcache client
    _mcc = memcache.Client(["{}:{:d}".format(MC_ADDRESS, MC_PORT)])
    h_dict_raw = _mcc.get("cc_hc_list")
    if h_dict_raw:
        h_dict = json.loads(h_dict_raw)
    else:
        h_dict = {}
    # required keys (load averages and memory gauges)
    req_keys = re.compile(
        r"^(load\.(1|5|15)$)|(mem\.(avail|free|used)\..*)$")
    # resolve to full host names / dev_pks / uuids
    _dev_dict = {
        _name: {
            "uuid": _uuid,
            "values": {},
            # core id -> job list
            "pinning": {},
            "idx": _idx,
        } for _name, _uuid, _idx in device.objects.filter(
            Q(name__in=_host_names)).values_list("name", "uuid", "idx")
    }
    # reverse lut (idx -> name); py3: items() instead of iteritems()
    _rev_lut = {
        _value["idx"]: _key for _key, _value in _dev_dict.items()
    }
    for _name, _struct in _dev_dict.items():
        if _struct["uuid"] in h_dict:
            _value_list = json.loads(
                _mcc.get("cc_hc_{}".format(_struct["uuid"])))
            for _list in _value_list:
                if req_keys.match(_list[1]):
                    _struct["values"][_list[1]] = _list[5] * _list[7]
    fc_dict = {}
    cur_time = time.time()
    for file_el in my_sge_info.get_tree().xpath(
            ".//job_list[master/text() = \"MASTER\"]", smart_strings=False):
        file_contents = file_el.findall(".//file_content")
        if len(file_contents):
            cur_fcd = []
            for cur_fc in file_contents:
                file_name = cur_fc.attrib["name"]
                content = cache.get(cur_fc.attrib["cache_uuid"])
                if content is not None:
                    # reverse line order so the newest output comes first
                    lines = content.replace(r"\r\n", r"\n").split("\n")
                    content = "\n".join(reversed(lines))
                    cur_fcd.append((
                        file_name,
                        content,
                        len(content),
                        int(cur_fc.attrib.get("last_update", cur_time)),
                        # display at most 10 lines
                        min(10, len(lines) + 1),
                    ))
            # most recently updated files first; py3: key= replaces the old
            # cmp=lambda x, y: cmp(x[3], y[3]) (identical ordering)
            fc_dict[file_el.attrib["full_id"]] = list(
                reversed(sorted(cur_fcd, key=lambda fc: fc[3])))
    for job_el in my_sge_info.get_tree().xpath(
            ".//job_list[master/text() = \"MASTER\"]", smart_strings=False):
        job_id = job_el.attrib["full_id"]
        pinning_el = job_el.find(".//pinning_info")
        if pinning_el is not None and pinning_el.text:
            # device_id -> process_id -> core_id
            _pd = json.loads(pinning_el.text)
            for _node_idx, _pin_dict in _pd.items():
                if int(_node_idx) in _rev_lut:
                    _dn = _rev_lut[int(_node_idx)]
                    for _proc_id, _core_id in _pin_dict.items():
                        _dev_dict[_dn]["pinning"].setdefault(
                            _core_id, []).append(job_id)
    # pprint.pprint(pinning_dict)
    # todo: add jobvars to running (waiting for rescheduled ?) list
    # print dir(rms_info.run_job_list)
    # pprint.pprint(_done_ser)
    json_resp = {
        "run_table": _sort_list(rms_info.run_job_list),
        "wait_table": _sort_list(rms_info.wait_job_list),
        "node_table": _sort_list(node_list),
        "sched_conf": sge_tools.build_scheduler_info(my_sge_info),
        "files": fc_dict,
        "fstree": sge_tools.build_fstree_info(my_sge_info),
        "node_values": _dev_dict,
    }
    return HttpResponse(json.dumps(json_resp), content_type="application/json")
def _update_nodes(self):
    """Refresh the SGE state and regenerate the slot information."""
    self._update()
    self._generate_slotinfo(
        sge_tools.build_node_list(self.__sge_info, self.__node_options)
    )