def display_crash_files(crashfile, rerun=False):
    """Print the contents of a nipype crash file and optionally rerun the node.

    Parameters
    ----------
    crashfile : str
        Path to a nipype crash file (.pklz) readable by ``loadcrash``.
    rerun : bool, optional
        If True, re-execute the crashed node (with ``base_dir`` reset and
        crash dumps redirected to /tmp).
    """
    from nipype.utils.filemanip import loadcrash

    crash_data = loadcrash(crashfile)
    # Crash files written before the node object was constructed carry no
    # 'node' entry; the previous unconditional lookup raised KeyError here.
    node = None
    if 'node' in crash_data:
        node = crash_data['node']
    tb = crash_data['traceback']
    print("\n")
    print("File: %s" % crashfile)
    if node is not None:
        print("Node: %s" % node)
        # base_dir is only set once the node has started executing.
        if node.base_dir:
            print("Working directory: %s" % node.output_dir())
        else:
            print("Node crashed before execution")
        print("\n")
        print("Node inputs:")
        print(node.inputs)
    print("\n")
    print("Traceback: ")
    print(''.join(tb))
    print("\n")
    if rerun:
        if node is None:
            print("No node in crashfile. Cannot rerun")
            return
        print("Rerunning node")
        node.base_dir = None
        # NOTE(review): flat config key differs from the nested
        # {'execution': {'crashdump_dir': ...}} form used by the sibling
        # display_crash_file -- kept as-is; confirm which nipype version
        # this targets before unifying.
        node.config['crashdump_dir'] = '/tmp'
        node.run()
        print("\n")
def read_crash(path, touch_dir=None):
    """Print the traceback stored in the crash file at *path*.

    A ``TraitError`` during deserialization usually means a path referenced
    by the crash data is missing; in that case the offending path is
    touched and the read is retried. ``touch_dir`` remembers the directory
    touched by the previous attempt so the same fix is never repeated.
    """
    try:
        print('\n'.join(loadcrash(path).get('traceback', [])))
    except TraitError as err:
        fixed_dir = _touch_trait_error_path(err.args[0])
        # Retry on the first failure, or when a different path was touched
        # this time -- never re-attempt the exact same fix.
        if not touch_dir or fixed_dir != touch_dir:
            read_crash(path, fixed_dir)
def load_pklz_traceback(crash_filepath):
    """Return the traceback message in the given crash file.

    Parameters
    ----------
    crash_filepath : str
        Path to a nipype crash file readable by ``loadcrash``.

    Returns
    -------
    str
        The joined traceback lines, or the ``TraitError`` text when the
        crash file cannot be deserialized because of a trait mismatch.
    """
    try:
        data = loadcrash(crash_filepath)
    except TraitError as te:
        return str(te)
    # Any other exception propagates unchanged; the former bare
    # ``except: raise`` clause was a no-op and has been removed.
    return "\n".join(data["traceback"])
def load_pklz_traceback(crash_filepath):
    """Return the traceback message in the given crash file.

    Parameters
    ----------
    crash_filepath : str
        Path to a nipype crash file readable by ``loadcrash``.

    Returns
    -------
    str
        The joined traceback lines, or the ``TraitError`` text when the
        crash file cannot be deserialized because of a trait mismatch.
    """
    try:
        data = loadcrash(crash_filepath)
    except TraitError as te:
        return str(te)
    # Any other exception propagates unchanged; the former bare
    # ``except: raise`` clause was a no-op and has been removed.
    return '\n'.join(data['traceback'])
def _read_pkl(path):
    """Load a nipype crash file and summarize it as a plain dict.

    The returned dict always has 'file' and 'traceback' keys; when the
    crash data contains a node, 'node', 'node_dir' and 'inputs' are added.
    """
    from nipype.utils.filemanip import loadcrash

    crash = loadcrash(path)
    summary = {'file': path, 'traceback': ''.join(crash['traceback'])}
    if 'node' in crash:
        node = crash['node']
        summary['node'] = node
        # base_dir is only set once the node has started executing.
        summary['node_dir'] = (node.output_dir() if node.base_dir
                               else "Node crashed before execution")
        # Sort inputs by trait name for a stable, readable listing.
        summary['inputs'] = sorted(node.inputs.trait_get().items())
    return summary
def _read_pkl(fname):
    # Summarize a nipype crash file (.pklz) as a plain dict for HTML display.
    # NOTE(review): unlike the sibling readers, loadcrash is assumed to be
    # imported at module level here -- confirm.
    crash_data = loadcrash(fname)
    # The replace targets the literal two-character sequence backslash-n
    # (not actual newlines) and emits the malformed tag "<br \>" --
    # presumably "<br />" was intended. TODO confirm against the HTML
    # consumer before changing either string.
    data = {'file': fname,
            'traceback': ''.join(crash_data['traceback']).replace("\\n", "<br \>")}
    if 'node' in crash_data:
        data['node'] = crash_data['node']
        # base_dir is only set once the node has started executing.
        if data['node'].base_dir:
            data['node_dir'] = data['node'].output_dir()
        else:
            data['node_dir'] = "Node crashed before execution"
        # Sort inputs by trait name for a stable, readable listing.
        data['inputs'] = sorted(data['node'].inputs.trait_get().items())
    return data
def _read_pkl(path):
    """Summarize a nipype crash file as a dict of displayable fields.

    Always populates 'file' and 'traceback'; adds 'node', 'node_dir' and
    'inputs' when the crash data includes the failed node.
    """
    from nipype.utils.filemanip import loadcrash

    crash_data = loadcrash(path)
    result = {"file": path, "traceback": "".join(crash_data["traceback"])}
    if "node" in crash_data:
        node = crash_data["node"]
        result["node"] = node
        # base_dir is only set once the node has started executing.
        if node.base_dir:
            result["node_dir"] = node.output_dir()
        else:
            result["node_dir"] = "Node crashed before execution"
        # Sort inputs by trait name for a stable, readable listing.
        result["inputs"] = sorted(node.inputs.trait_get().items())
    return result
def display_crash_file(crashfile, rerun, debug, directory):
    """display crash file content and rerun if required"""
    from nipype.utils.filemanip import loadcrash

    crash_data = loadcrash(crashfile)
    # Crash files written before node construction carry no 'node' entry.
    node = crash_data["node"] if "node" in crash_data else None
    tb = crash_data["traceback"]
    print("\n")
    print("File: %s" % crashfile)
    if node:
        print("Node: %s" % node)
        # base_dir is only set once the node has started executing.
        if node.base_dir:
            print("Working directory: %s" % node.output_dir())
        else:
            print("Node crashed before execution")
        print("\n")
        print("Node inputs:")
        print(node.inputs)
    print("\n")
    print("Traceback: ")
    print("".join(tb))
    print("\n")
    if not rerun:
        return
    if node is None:
        print("No node in crashfile. Cannot rerun")
        return
    print("Rerunning node")
    node.base_dir = directory
    node.config = {"execution": {"crashdump_dir": "/tmp"}}
    try:
        node.run()
    except:  # noqa: E722 -- deliberately broad so the debugger sees every failure
        if debug and debug != "ipython":
            import pdb
            pdb.post_mortem()
        else:
            raise
    print("\n")
def display_crash_file(crashfile, rerun, debug, directory):
    """display crash file content and rerun if required"""
    from nipype.utils.filemanip import loadcrash

    contents = loadcrash(crashfile)
    # Crash files written before node construction carry no 'node' entry.
    crashed_node = contents['node'] if 'node' in contents else None
    print("\n")
    print("File: %s" % crashfile)
    if crashed_node:
        print("Node: %s" % crashed_node)
        # base_dir is only set once the node has started executing.
        if crashed_node.base_dir:
            print("Working directory: %s" % crashed_node.output_dir())
        else:
            print("Node crashed before execution")
        print("\n")
        print("Node inputs:")
        print(crashed_node.inputs)
    print("\n")
    print("Traceback: ")
    print(''.join(contents['traceback']))
    print("\n")
    if rerun:
        if crashed_node is None:
            print("No node in crashfile. Cannot rerun")
            return
        print("Rerunning node")
        crashed_node.base_dir = directory
        crashed_node.config = {'execution': {'crashdump_dir': '/tmp'}}
        try:
            crashed_node.run()
        except:  # deliberately bare: post-mortem any failure when debugging
            if debug and debug != 'ipython':
                import pdb
                pdb.post_mortem()
            else:
                raise
        print("\n")
def index_error_dir(self, error_dir):
    '''
    Crawl subjects crash directory for the corresponding run and return
    text for .pklz crash file found.
    '''
    for root, _dirs, filenames in os.walk(error_dir):
        for fname in filenames:
            # Only deal with files that start with crash and end in pklz
            if not (fname.startswith('crash') and fname.endswith('pklz')):
                continue
            crash_data = loadcrash(os.path.join(root, fname))
            error = {}
            error['traceback'] = ''.join(crash_data['traceback']).replace("\\n", "<br \>")
            error['file'] = fname
            if 'node' in crash_data:
                node = crash_data['node']
                error['node'] = node
                # base_dir is only set once the node has started executing.
                if node.base_dir:
                    error['node_dir'] = node.output_dir()
                else:
                    error['node_dir'] = "Node crashed before execution"
                # Sort inputs by trait name for a stable, readable listing.
                error['inputs'] = sorted(node.inputs.trait_get().items())
            self.errors.append(error)
def generate_xml_nodes(sourceDir, targetDir, pvc_method, tka_method):
    """Build <targetDir>/preproc/dashboard/public/nodes.xml for the QC dashboard.

    Walks the nipype execution graph (preproc/graph1.json), and for each
    subject emits a <scan sid cid> element with per-stage <node> children
    describing the input/output MINC volumes. Every referenced volume is
    converted (.raw/.header companions) for the viewer if not already done.

    Parameters
    ----------
    sourceDir, targetDir : str
        Source-data and pipeline-output directories; stripped from the
        volume paths written into the XML.
    pvc_method, tka_method : str or None
        Names of the PVC / TKA nodes; when None that stage is omitted.
    """
    listOfNodes = [{"name": "pet2mri",
                    "mnc_inputs": {"node": "pet2mri", "file": 'in_target_file'},
                    "mnc_outputs": {"node": "pet2mri", "file": 'out_file_img'}}]
    if pvc_method is not None:
        listOfNodes.append({"name": "pvc",
                            "mnc_inputs": {"node": pvc_method, "file": 'in_file'},
                            "mnc_outputs": {"node": pvc_method, "file": 'out_file'}})
    if tka_method is not None:
        listOfNodes.append({"name": "tka",
                            "mnc_inputs": {"node": "convertParametric", "file": 'out_file'},
                            "mnc_outputs": {"node": "pet2mri", "file": 'in_target_file'}})

    filename = targetDir + "/preproc/graph1.json"
    # BUG FIX: the Python 2 builtin ``file()`` no longer exists; use open()
    # in a context manager so the handle is closed even on error.
    with open(filename, 'r') as fp:
        data = json.load(fp)

    xmlQC = Element('qc')
    listVolumes = []
    for subjIdx in range(len(data["groups"])):
        # procs[0]..procs[-1] delimit this subject's node IDs in the graph.
        procs = data["groups"][subjIdx]["procs"]
        # Pull the subject (sid) and condition (cid) ids from the PET
        # datasource node of this subject's slice of the graph.
        for nodeID in range(procs[0], procs[-1]):
            nodeName = "_".join(data["nodes"][nodeID]["name"].split("_")[1:])
            if nodeName == "datasourcePET":
                nodeReport = loadcrash(targetDir + "/preproc/" + data["nodes"][nodeID]["result"])
                for key, value in nodeReport.inputs.items():
                    if key == "cid":
                        cid = str(value)
                    if key == "sid":
                        sid = str(value)
        xmlscan = SubElement(xmlQC, 'scan')
        xmlscan.set('sid', sid)
        xmlscan.set('cid', cid)
        for x in listOfNodes:
            xmlnode = SubElement(xmlscan, 'node')
            xmlnode.set('name', x['name'])
            for nodeID in range(procs[0], procs[-1]):
                nodeName = "_".join(data["nodes"][nodeID]["name"].split("_")[1:])
                if nodeName == x["mnc_inputs"]["node"]:
                    nodeReport = loadcrash(targetDir + "/preproc/" + data["nodes"][nodeID]["result"])
                    xmlmnc = SubElement(xmlnode, 'inMnc')
                    for key, value in nodeReport.inputs.items():
                        # Substring match: key names like 'in_file' select
                        # the configured input trait.
                        if key in x['mnc_inputs']["file"]:
                            value = value[0] if isinstance(value, list) else value
                            xmlkey = SubElement(xmlmnc, str(key))
                            xmlkey.text = str(value).replace(sourceDir + "/", '').replace(targetDir + "/", '')
                            listVolumes.append(str(value))
                if nodeName == x["mnc_outputs"]["node"]:
                    nodeReport = loadcrash(targetDir + "/preproc/" + data["nodes"][nodeID]["result"])
                    xmlmnc = SubElement(xmlnode, 'outMnc')
                    for key, value in nodeReport.inputs.items():
                        if key in x['mnc_outputs']["file"]:
                            value = value[0] if isinstance(value, list) else value
                            xmlkey = SubElement(xmlmnc, str(key))
                            xmlkey.text = str(value).replace(sourceDir + "/", '').replace(targetDir + "/", '')
                            listVolumes.append(str(value))

    with open(targetDir + "/preproc/dashboard/public/nodes.xml", "w") as f:
        f.write(prettify(xmlQC))

    # Ensure every referenced MINC volume has the .raw/.header companions
    # the dashboard viewer needs.
    for mincfile in listVolumes:
        rawfile = mincfile + '.raw'
        headerfile = mincfile + '.header'
        if not os.path.exists(rawfile) or not os.path.exists(headerfile):
            adjust_hdr(mincfile)
            mnc2vol(mincfile)