def rvapi_shutdown(self, amopt):
    """Return any results to jscofe.

    Serialises a metadata dictionary (first tab id plus the top result
    files, with paths made relative to the run's parent directory) into
    the rvapi document and stores it.

    Parameters
    ----------
    amopt : dict
        AMPLE results dictionary with all information
    """
    rvdoc = amopt['rvapi_document']
    if not rvdoc:
        return
    work_dir = amopt['work_dir']
    # Create dictionary we're going to return
    meta = {'first_tab_id': self.summary_tab_id, 'results': []}
    nresults = 0
    if 'mrbump_results' in amopt and amopt['mrbump_results']:
        mrb_results = amopt['mrbump_results']
        nresults = min(3, len(mrb_results))
    if nresults > 0:
        root = os.path.join(work_dir, "..")
        for fdata in mrbump_util.ResultsSummary(
                mrb_results[:nresults]).topFiles(nresults):
            # BUG FIX: os.path.relpath(path, start) - the original swapped the
            # arguments, producing the path of `root` relative to the file
            # instead of the file's path relative to the run's parent dir.
            fdata['pdb'] = os.path.relpath(fdata['pdb'], root)
            fdata['mtz'] = os.path.relpath(fdata['mtz'], root)
            meta['results'].append(fdata)
    # Commit to file
    logger.debug("Exporting pyrvapi metadata:\n{0}".format(meta))
    pyrvapi.rvapi_put_meta(json.dumps(meta))
    pyrvapi.rvapi_store_document2(rvdoc)
    return
def rvapi_shutdown(self, amopt):
    """Return any results to jscofe.

    Parameters
    ----------
    amopt : dict
        AMPLE results dictionary with all information
    """
    rvdoc = amopt['rvapi_document']
    if not rvdoc:
        return
    # Metadata dictionary that gets serialised to JSON for jscofe
    meta = {'results': []}
    mrb_results = amopt.get('mrbump_results')
    nresults = min(3, len(mrb_results)) if mrb_results else 0
    if nresults > 0:
        summary = mrbump_util.ResultsSummary(mrb_results[:nresults])
        for fdata in summary.topFiles(nresults):
            # Mangle paths. relpath assumes args are directories so need to add ..
            fdata['pdb'] = self.fix_path(fdata['pdb'])
            fdata['mtz'] = self.fix_path(fdata['mtz'])
            meta['results'].append(fdata)
    # Commit to file
    logger.debug("Exporting pyrvapi metadata:\n{0}".format(meta))
    pyrvapi.rvapi_put_meta(json.dumps(meta))
    pyrvapi.rvapi_store_document2(rvdoc)
    return
def create_results_tab(self, ample_dict):
    """Create or refresh the results tab from the current MRBUMP results.

    Returns the tab id, or None when there is nothing to do (running
    under ccp4i2, no summary tab yet, no/unchanged MRBUMP results).
    """
    if self.ccp4i2 or not self.summary_tab_id or not self._got_mrbump_results(
            ample_dict):
        return
    mrb_results = ample_dict.get('mrbump_results')
    if mrb_results == self.old_mrbump_results:
        return
    self.old_mrbump_results = mrb_results
    if not self.results_tab_id:
        self.results_tab_id = "results_tab"
        pyrvapi.rvapi_insert_tab(self.results_tab_id, "Results",
                                 self.summary_tab_id, False)
    # Delete old sections:
    pyrvapi.rvapi_flush()
    for section_id in self.results_tab_sections:
        pyrvapi.rvapi_remove_widget(section_id)
    pyrvapi.rvapi_flush()
    self.results_tab_sections = []
    # BUG FIX: the original tested "'ensembles_data' in ample_dict['ensembles_data']",
    # i.e. membership in the *value* rather than the dict, which either raised
    # KeyError or was always False for list values.
    ensemble_results = ample_dict.get('ensembles_data')
    mrbsum = mrbump_util.ResultsSummary(
        results=mrb_results[0:min(len(mrb_results), mrbump_util.TOP_KEEP)])
    mrbsum.sortResults(prioritise="SHELXE_CC")
    self.results_section(
        self.results_tab_id, mrbsum.results, ensemble_results,
        "Top {0} SHELXE Results".format(mrbump_util.TOP_KEEP))
    mrbsum.sortResults(prioritise="PHASER_TFZ")
    # Add seperator between results - doesn't work as not deleted on refresh
    # pyrvapi.rvapi_add_text("<br/><hr/><br/>", self.results_tab_id, 0, 0, 1, 1)
    self.results_section(
        self.results_tab_id, mrbsum.results, ensemble_results,
        "Top {0} PHASER Results".format(mrbump_util.TOP_KEEP))
    return self.results_tab_id
def create_summary_tab(self, ample_dict):
    """Create/refresh the summary tab: ensembles section, MRBUMP results
    table and (outside ccp4i2) a feedback/survey section.

    Returns the summary tab id.
    """
    self._create_summary_tab()
    if self.do_create_ensembles_section(ample_dict):
        self.create_ensembles_section(ample_dict)
    # Nothing more to show until MRBUMP has produced results
    if not self._got_mrbump_results(ample_dict):
        return self.summary_tab_id
    if not self.summary_tab_results_sec_id:
        self.rm_pending_section()
        # Only create the table once
        self.summary_tab_results_sec_id = "mrbump"
        pyrvapi.rvapi_add_section(self.summary_tab_results_sec_id, "MRBUMP",
                                  self.summary_tab_id, 0, 0, 1, 1, True)
        self.summary_tab_results_sec_table_id = "mrbump_table"
        pyrvapi.rvapi_add_table1(self.summary_tab_results_sec_id + "/" +
                                 self.summary_tab_results_sec_table_id,
                                 "MRBUMP Results", 1, 0, 1, 1, True)
    mrb_results = ample_dict.get('mrbump_results')
    # Only refill the table when the results have actually changed
    if not mrb_results == self.old_mrbump_results:
        # We set old_mrbump_results when we create the results_tab
        self.fill_table(self.summary_tab_results_sec_table_id,
                        mrbump_util.ResultsSummary().results_table(mrb_results),
                        tooltips=self._mrbump_tooltips)
    if not self.summary_tab_survey_sec_id and not self.ccp4i2:
        # Only create the table once
        self.summary_tab_survey_sec_id = "survey"
        pyrvapi.rvapi_add_section(self.summary_tab_survey_sec_id, "Feedback",
                                  self.summary_tab_id, 0, 0, 1, 1, True)
        rstr = "<h2>How did we do?</h2><h3>Please follow this link and leave some feedback:</h3><a href='{0}' style='color: blue'>{0}</a>".format(reference_manager.survey_url)
        pyrvapi.rvapi_add_text(rstr, self.summary_tab_survey_sec_id, 0, 0, 1, 1)
    return self.summary_tab_id
def processOutputFiles(self):
    """Associate the task's top MRBUMP output files with the i2 output objects.

    The PROGRAMXML file is generated by pyrvapi, so only the i2-specific
    result handling happens here: the best PDB/MTZ pairs are copied into
    the job's work directory and appended to XYZOUT/HKLOUT with annotations.
    """
    pkl = os.path.join(self.getWorkDirectory(), I2DIR, AMPLE_PKL)
    top_files = mrbump_util.ResultsSummary(results_pkl=pkl).topFiles()
    if top_files:
        work_dir = self.getWorkDirectory()
        out = self.container.outputData
        for rank, entry in enumerate(top_files, start=1):
            # Need to copy the files into the actual project directory -
            # cannot be a sub-directory. Not entirely sure why but...
            xyz = os.path.join(work_dir, os.path.basename(entry['pdb']))
            mtz = os.path.join(work_dir, os.path.basename(entry['mtz']))
            # The files should always exist; the isfile guards are belt-and-braces
            if os.path.isfile(entry['pdb']):
                shutil.copy2(entry['pdb'], xyz)
            if os.path.isfile(entry['mtz']):
                shutil.copy2(entry['mtz'], mtz)
            out.XYZOUT.append(xyz)
            out.XYZOUT[-1].annotation = 'PDB file of {0} #{1}'.format(
                entry['source'], rank)
            out.HKLOUT.append(mtz)
            out.HKLOUT[-1].annotation = 'MTZ file of {0} #{1}'.format(
                entry['source'], rank)
    return self.SUCCEEDED
def create_results_tab(self, ample_dict):
    """Create or refresh the results tab from the current MRBUMP results.

    Returns the tab id, or None when there is nothing to do (running
    under ccp4i2, no summary tab yet, no/unchanged MRBUMP results).
    """
    if self.ccp4i2 or not self.summary_tab_id:
        return
    if not self._got_mrbump_results(ample_dict):
        return
    mrb_results = ample_dict['mrbump_results']
    if mrb_results == self.old_mrbump_results:
        return
    self.old_mrbump_results = mrb_results
    if not self.results_tab_id:
        self.results_tab_id = "results_tab"
        # Insert results tab before summary tab
        pyrvapi.rvapi_insert_tab(
            self.results_tab_id, "Results", self.summary_tab_id,
            False)  # Last arg is "open" - i.e. show or hide
    # Delete old sections:
    pyrvapi.rvapi_flush()
    for section_id in self.results_tab_sections:
        pyrvapi.rvapi_remove_widget(section_id)
    pyrvapi.rvapi_flush()
    self.results_tab_sections = []
    # BUG FIX: the original tested "'ensembles_data' in ample_dict['ensembles_data']",
    # i.e. membership in the *value* rather than the dict, which either raised
    # KeyError or was always False for list values.
    ensemble_results = ample_dict.get('ensembles_data')
    mrbsum = mrbump_util.ResultsSummary(
        results=mrb_results[0:min(len(mrb_results), mrbump_util.TOP_KEEP)])
    mrbsum.sortResults(prioritise="SHELXE_CC")
    self.results_section(
        self.results_tab_id, mrbsum.results, ensemble_results,
        "Top {0} SHELXE Results".format(mrbump_util.TOP_KEEP))
    mrbsum.sortResults(prioritise="PHASER_TFZ")
    self.results_section(
        self.results_tab_id, mrbsum.results, ensemble_results,
        "Top {0} PHASER Results".format(mrbump_util.TOP_KEEP))
    return self.results_tab_id
def monitor():
    """Harvest the MRBUMP results gathered so far and refresh the GUI.

    Reads `optd` and `self.ample_output` from the enclosing scope.
    """
    summary = mrbump_util.ResultsSummary()
    summary.extractResults(optd['mrbump_dir'], purge=bool(optd['purge']))
    optd['mrbump_results'] = summary.results
    return self.ample_output.display_results(optd)
def molecular_replacement(self, optd):
    """Run MRBUMP molecular replacement on the ensembles and collect results.

    Parameters
    ----------
    optd : dict
        AMPLE options dictionary. Reads the ensembles and submission
        settings; writes 'mrbump_dir', 'mrbump_scripts', 'mrbump_results'
        and 'success' back into it (and saves it to disk twice).
    """
    if not optd['mrbump_scripts']:
        # MRBUMP analysis of the ensembles - scripts are only generated here;
        # on a restart they may already exist and this whole branch is skipped
        logger.info('----- Running MRBUMP on ensembles--------\n\n')
        if len(optd['ensembles']) < 1:
            msg = "ERROR! Cannot run MRBUMP as there are no ensembles!"
            exit_util.exit_error(msg)
        # Work out (and if necessary create) the MRBUMP run directory
        if optd['mrbump_dir'] is None:
            bump_dir = os.path.join(optd['work_dir'], 'MRBUMP')
            optd['mrbump_dir'] = bump_dir
        else:
            bump_dir = optd['mrbump_dir']
        if not os.path.exists(bump_dir):
            os.mkdir(bump_dir)
        optd['mrbump_results'] = []
        logger.info("Running MRBUMP jobs in directory: %s", bump_dir)
        # Set an ensemble-specific phaser_rms if required
        if optd['phaser_rms'] == 'auto':
            ensembler.set_phaser_rms_from_subcluster_score(optd)
        # Sort the ensembles in a favourable way
        logger.info("Sorting ensembles")
        sort_keys = [
            'cluster_num', 'truncation_level',
            'subcluster_radius_threshold', 'side_chain_treatment'
        ]
        ensemble_pdbs_sorted = ensembler.sort_ensembles(
            optd['ensembles'],
            optd['ensembles_data'],
            keys=sort_keys,
            prioritise=True)
        # Create job scripts
        logger.info("Generating MRBUMP runscripts")
        optd['mrbump_scripts'] = mrbump_util.write_mrbump_files(
            ensemble_pdbs_sorted,
            optd,
            job_time=mrbump_util.MRBUMP_RUNTIME,
            ensemble_options=optd['ensemble_options'],
            directory=bump_dir)
    # Create function for monitoring jobs - static function decorator?
    if self.ample_output:
        def monitor():
            # Refresh the GUI with whatever results have been produced so far
            r = mrbump_util.ResultsSummary()
            r.extractResults(optd['mrbump_dir'], purge=bool(optd['purge']))
            optd['mrbump_results'] = r.results
            return self.ample_output.display_results(optd)
    else:
        monitor = None
    # Save results here so that we have the list of scripts and mrbump directory set
    ample_util.save_amoptd(optd)
    # Change to mrbump directory before running
    os.chdir(optd['mrbump_dir'])
    ok = workers_util.run_scripts(
        job_scripts=optd['mrbump_scripts'],
        monitor=monitor,
        check_success=mrbump_util.checkSuccess,
        early_terminate=optd['early_terminate'],
        nproc=optd['nproc'],
        job_time=mrbump_util.MRBUMP_RUNTIME,
        job_name='mrbump',
        submit_cluster=optd['submit_cluster'],
        submit_qtype=optd['submit_qtype'],
        submit_queue=optd['submit_queue'],
        submit_pe_lsf=optd['submit_pe_lsf'],
        submit_pe_sge=optd['submit_pe_sge'],
        submit_array=optd['submit_array'],
        submit_max_array=optd['submit_max_array'])
    # A failed run is logged but not fatal - whatever results exist are still collected
    if not ok:
        msg = "An error code was returned after running MRBUMP on the ensembles!\n" + \
              "For further information check the logs in directory: {0}".format(optd['mrbump_dir'])
        logger.critical(msg)
    # Collect the MRBUMP results
    results_summary = mrbump_util.ResultsSummary()
    optd['mrbump_results'] = results_summary.extractResults(
        optd['mrbump_dir'], purge=bool(optd['purge']))
    optd['success'] = results_summary.success
    ample_util.save_amoptd(optd)
    summary = mrbump_util.finalSummary(optd)
    logger.info(summary)
def test_topfiles(self):
    """topFiles() on the reference pickle yields three entries; the third carries an 'info' field."""
    pkl_path = os.path.join(self.testfiles_dir, AMPLE_PKL)
    top = mrbump_util.ResultsSummary(results_pkl=pkl_path).topFiles()
    self.assertEqual(len(top), 3)
    self.assertIn('info', top[2])
def results_section(self, results_tab_id, mrb_results, ensemble_results,
                    section_title):
    """Add a section of MRBUMP results to the results tab.

    One tree node is created per result; each node holds a summary table,
    the ensemble search model (when available) and a collapsible sub-section
    for every program's outputs.

    Parameters
    ----------
    results_tab_id : str
        pyrvapi id of the results tab the section is added to.
    mrb_results : list
        MRBUMP result dictionaries, one per ensemble.
    ensemble_results : list or None
        Ensemble data used to locate the search-model PDB for each result.
    section_title : str
        Title displayed above the tree widget; also seeds the widget ids.
    """
    if not mrb_results:
        return
    # Create unique identifier for this section by using the id
    # All ids will have this appended to avoid clashes
    uid = str(uuid.uuid4())
    section_id = section_title.replace(" ", "_") + uid
    self.results_tab_sections.append(
        section_id)  # Add to list so we can remove if we update
    pyrvapi.rvapi_add_panel(section_id, results_tab_id, 0, 0, 1, 1)
    pyrvapi.rvapi_add_text("<h3>{0}</h3>".format(section_title),
                           section_id, 0, 0, 1, 1)
    results_tree = "results_tree" + section_id
    pyrvapi.rvapi_add_tree_widget(results_tree, section_title, section_id,
                                  0, 0, 1, 1)
    # (result-dict key prefix, section title) for every program whose outputs
    # may be attached to a result. NOTE: "Redbuild" typo in the last title fixed.
    program_sections = [
        ('PHASER', "PHASER Outputs"),
        ('REFMAC', "REFMAC Outputs"),
        ('BUCC', "BUCCANEER Outputs"),
        ('ARP', "ARPWARP Outputs"),
        ('SHELXE', "SHELXE Outputs"),
        ('SXRBUCC', "BUCCANEER SHELXE Trace Rebuild Outputs"),
        ('SXRARP', "ARPWARP SHELXE Trace Rebuild Outputs"),
    ]
    for r in mrb_results:
        name = r['ensemble_name']
        container_id = "sec_{0}".format(name) + uid
        pyrvapi.rvapi_add_panel(container_id, results_tree, 0, 0, 1, 1)
        header = "<h3>Results for ensemble: {0}</h3>".format(name)
        pyrvapi.rvapi_add_text(header, container_id, 0, 0, 1, 1)
        # Summary table for this ensemble
        sec_table = "sec_table_{0}".format(name) + uid
        pyrvapi.rvapi_add_section(sec_table, "Summary", container_id, 0, 0,
                                  1, 1, True)
        table_id = "table_{0}".format(name) + uid
        pyrvapi.rvapi_add_table(table_id, "", sec_table, 1, 0, 1, 1, False)
        tdata = mrbump_util.ResultsSummary().results_table([r])
        self.fill_table(table_id, tdata, tooltips=self._mrbump_tooltips)
        # Ensemble search model
        if ensemble_results:
            epdb = self.ensemble_pdb(r, ensemble_results)
            if epdb:
                sec_ensemble = "sec_ensemble_{0}".format(name) + uid
                pyrvapi.rvapi_add_section(sec_ensemble,
                                          "Ensemble Search Model",
                                          container_id, 0, 0, 1, 1, False)
                data_ensemble = "data_ensemble_{0}".format(name) + uid
                pyrvapi.rvapi_add_data(data_ensemble, "Ensemble PDB",
                                       self.fix_path(epdb), "XYZOUT",
                                       sec_ensemble, 2, 0, 1, 1, True)
        # One sub-section per program; the seven copy-pasted blocks of the
        # original are folded into a single helper.
        for key_prefix, title in program_sections:
            self._add_program_output(r, name, uid, container_id,
                                     key_prefix, title)
        pyrvapi.rvapi_set_tree_node(results_tree, container_id,
                                    "{0}".format(name), "auto", "")
    return

def _add_program_output(self, result, name, uid, container_id, prefix,
                        section_title):
    """Add a collapsible section with one program's PDB/MTZ pair and logfile.

    `prefix` selects the '<prefix>_logfile', '<prefix>_pdbout' and
    '<prefix>_mtzout' keys of `result`. Nothing is added unless the logfile
    or the pdb+mtz pair exists on disk.
    """
    logfile = str(result['{0}_logfile'.format(prefix)])
    pdbout = str(result['{0}_pdbout'.format(prefix)])
    mtzout = str(result['{0}_mtzout'.format(prefix)])
    have_xyz = os.path.isfile(pdbout) and os.path.isfile(mtzout)
    if not (os.path.isfile(logfile) or have_xyz):
        return
    low = prefix.lower()
    sec_id = "sec_{0}_{1}".format(low, name) + uid
    pyrvapi.rvapi_add_section(sec_id, section_title, container_id, 0, 0, 1,
                              1, False)
    if have_xyz:
        data_id = "data_{0}_out_{1}".format(low, name) + uid
        # The data widget takes the path without its extension; the MTZ is
        # appended to the same widget so the viewer pairs them up.
        pyrvapi.rvapi_add_data(data_id, "{0} PDB".format(prefix),
                               os.path.splitext(self.fix_path(pdbout))[0],
                               "xyz:map", sec_id, 2, 0, 1, 1, True)
        pyrvapi.rvapi_append_to_data(data_id, self.fix_path(mtzout),
                                     "xyz:map")
    if os.path.isfile(logfile):
        # NOTE: logfile widget ids historically carry no uid suffix - kept
        # identical to the original scheme for compatibility.
        pyrvapi.rvapi_add_data("data_{0}_logfile_{1}".format(low, name),
                               "{0} Logfile".format(prefix),
                               self.fix_path(logfile), "text", sec_id, 2,
                               0, 1, 1, True)
def create_summary_tab(self, ample_dict):
    """Create/refresh the summary tab: ensembling statistics, MRBUMP
    results table and (outside ccp4i2) a feedback/survey section.

    Returns the summary tab id.
    """
    self._create_summary_tab()
    # Ensembles section - only shown for a standard multi-model run, and only
    # created once (summary_tab_ensemble_sec_id acts as the guard)
    if not (ample_dict['single_model_mode'] or ample_dict['homologs'] or ample_dict['ideal_helices']) and \
            bool(ample_dict['ensembles_data']) and not self.summary_tab_ensemble_sec_id:
        self.rm_pending_section()
        ensembles_data = ample_dict['ensembles_data']
        self.summary_tab_ensemble_sec_id = "ensembles"
        pyrvapi.rvapi_add_section(self.summary_tab_ensemble_sec_id,
                                  "Ensembles", self.summary_tab_id, 0, 0, 1,
                                  1, True)
        # Get the ensembling data
        d = ensembler.collate_cluster_data(ensembles_data)
        clusters = d['clusters']
        rstr = ""
        rstr += "Ensemble Results<br/>"
        rstr += "----------------<br/><br/>"
        rstr += "Cluster method: {0}<br/>".format(d['cluster_method'])
        rstr += "Cluster score type: {0}<br/>".format(
            d['cluster_score_type'])
        rstr += "Truncation method: {0}<br/>".format(
            d['truncation_method'])
        rstr += "Percent truncation: {0}<br/>".format(
            d['percent_truncation'])
        rstr += "Side-chain treatments: {0}<br/>".format(
            d['side_chain_treatments'])
        rstr += "Number of clusters: {0}<br/><br/>".format(
            len(clusters.keys()))
        rstr += "Generated {0} ensembles<br/><br/>".format(
            len(ensembles_data))
        pyrvapi.rvapi_add_text(rstr, self.summary_tab_ensemble_sec_id, 0,
                               0, 1, 1)
        ensemble_table = "ensemble_table"
        pyrvapi.rvapi_add_table1(
            self.summary_tab_ensemble_sec_id + "/" + ensemble_table,
            "Ensembling Results", 1, 0, 1, 1, True)
        # Only the first cluster is tabulated in the summary
        cluster_num = 1
        tdata = ensembler.cluster_table_data(clusters, cluster_num,
                                             d['side_chain_treatments'])
        self.fill_table(ensemble_table, tdata,
                        tooltips=self._ensemble_tooltips)
    #
    # MRBUMP Results
    #
    if not self._got_mrbump_results(ample_dict):
        return self.summary_tab_id
    if not self.summary_tab_results_sec_id:
        self.rm_pending_section()
        # Only create the table once
        self.summary_tab_results_sec_id = "mrbump"
        pyrvapi.rvapi_add_section(self.summary_tab_results_sec_id, "MRBUMP",
                                  self.summary_tab_id, 0, 0, 1, 1, True)
        self.summary_tab_results_sec_table_id = "mrbump_table"
        pyrvapi.rvapi_add_table1(
            self.summary_tab_results_sec_id + "/" +
            self.summary_tab_results_sec_table_id, "MRBUMP Results", 1, 0,
            1, 1, True)
    mrb_results = ample_dict['mrbump_results']
    # Only refill the table when the results have actually changed
    if not mrb_results == self.old_mrbump_results:
        # We set old_mrbump_results when we create the results_tab
        self.fill_table(
            self.summary_tab_results_sec_table_id,
            mrbump_util.ResultsSummary().results_table(mrb_results),
            tooltips=self._mrbump_tooltips)
    #
    # Survey section
    #
    if not self.summary_tab_survey_sec_id and not self.ccp4i2:
        # Only create the table once
        self.summary_tab_survey_sec_id = "survey"
        pyrvapi.rvapi_add_section(self.summary_tab_survey_sec_id,
                                  "Feedback", self.summary_tab_id, 0, 0, 1,
                                  1, True)
        rstr = "<h2>How did we do?</h2><h3>Please follow this link and leave some feedback:</h3><a href='{0}' style='color: blue'>{0}</a>".format(
            ample_util.survey_url)
        pyrvapi.rvapi_add_text(rstr, self.summary_tab_survey_sec_id, 0, 0,
                               1, 1)
    return self.summary_tab_id
def results_section(self, results_tab_id, mrb_results, ensemble_results,
                    section_title):
    """Results Tab: add a tree widget with one node per MRBUMP result.

    Each node holds a summary table, the ensemble search model (when
    available) and one collapsible sub-section per program's outputs.

    Parameters
    ----------
    results_tab_id : str
        pyrvapi id of the results tab the section is added to.
    mrb_results : list
        MRBUMP result dictionaries, one per ensemble.
    ensemble_results : list or None
        Ensemble data used to locate the search-model PDB for each result.
    section_title : str
        Title displayed above the tree widget; also seeds the widget ids.
    """
    if not mrb_results:
        return
    # Create unique identifier for this section by using the id
    # All ids will have this appended to avoid clashes
    uid = str(uuid.uuid4())
    section_id = section_title.replace(" ", "_") + uid
    self.results_tab_sections.append(
        section_id)  # Add to list so we can remove if we update
    pyrvapi.rvapi_add_panel(section_id, results_tab_id, 0, 0, 1, 1)
    pyrvapi.rvapi_add_text("<h3>{0}</h3>".format(section_title),
                           section_id, 0, 0, 1, 1)
    results_tree = "results_tree" + section_id
    pyrvapi.rvapi_add_tree_widget(results_tree, section_title, section_id,
                                  0, 0, 1, 1)
    for r in mrb_results:
        ensemble_name = r['ensemble_name']
        container_id = "sec_{0}".format(ensemble_name) + uid
        pyrvapi.rvapi_add_panel(container_id, results_tree, 0, 0, 1, 1)
        header = "<h3>Results for ensemble: {0}</h3>".format(ensemble_name)
        pyrvapi.rvapi_add_text(header, container_id, 0, 0, 1, 1)
        sec_table = "sec_table_{0}".format(ensemble_name) + uid
        # FIX: removed dead local - the original assigned
        # "Results table: {name}" to `title` and immediately overwrote it.
        title = "Summary"
        pyrvapi.rvapi_add_section(sec_table, title, container_id, 0, 0, 1,
                                  1, True)
        table_id = "table_{0}".format(ensemble_name) + uid
        pyrvapi.rvapi_add_table(table_id, "", sec_table, 1, 0, 1, 1, False)
        tdata = mrbump_util.ResultsSummary().results_table([r])
        self.fill_table(table_id, tdata, tooltips=self._mrbump_tooltips)
        # Ensemble
        if ensemble_results:
            epdb = self.ensemble_pdb(r, ensemble_results)
            if epdb:
                sec_ensemble = "sec_ensemble_{0}".format(
                    ensemble_name) + uid
                pyrvapi.rvapi_add_section(sec_ensemble,
                                          "Ensemble Search Model",
                                          container_id, 0, 0, 1, 1, False)
                data_ensemble = "data_ensemble_{0}".format(
                    ensemble_name) + uid
                pyrvapi.rvapi_add_data(data_ensemble, "Ensemble PDB",
                                       self.fix_path(epdb), "XYZOUT",
                                       sec_ensemble, 2, 0, 1, 1, True)
        # PHASER
        self.add_results_section(
            result_dict=r,
            ensemble_name=ensemble_name,
            program_name='PHASER',
            logfile_key='PHASER_logfile',
            pdb_key='PHASER_pdbout',
            mtz_key='PHASER_mtzout',
            uid=uid,
            container_id=container_id,
        )
        # REFMAC
        self.add_results_section(
            result_dict=r,
            ensemble_name=ensemble_name,
            program_name='Refmac',
            logfile_key='REFMAC_logfile',
            pdb_key='REFMAC_pdbout',
            mtz_key='REFMAC_mtzout',
            uid=uid,
            container_id=container_id,
        )
        # Buccaner
        self.add_results_section(
            result_dict=r,
            ensemble_name=ensemble_name,
            program_name='BUCCANEER',
            logfile_key='BUCC_logfile',
            pdb_key='BUCC_pdbout',
            mtz_key='BUCC_mtzout',
            uid=uid,
            container_id=container_id,
        )
        # Arpwarp
        self.add_results_section(
            result_dict=r,
            ensemble_name=ensemble_name,
            program_name='ArpWarp',
            logfile_key='ARP_logfile',
            pdb_key='ARP_pdbout',
            mtz_key='ARP_mtzout',
            uid=uid,
            container_id=container_id,
        )
        # SHELXE
        self.add_results_section(
            result_dict=r,
            ensemble_name=ensemble_name,
            program_name='SHELXE',
            logfile_key='SHELXE_logfile',
            pdb_key='SHELXE_pdbout',
            mtz_key='SHELXE_mtzout',
            uid=uid,
            container_id=container_id,
        )
        # Buccaner Rebuild
        self.add_results_section(
            result_dict=r,
            ensemble_name=ensemble_name,
            program_name='BUCCANEER SHELXE Trace Rebuild',
            logfile_key='SXRBUCC_logfile',
            pdb_key='SXRBUCC_pdbout',
            mtz_key='SXRBUCC_mtzout',
            uid=uid,
            container_id=container_id,
        )
        # Arpwarp Rebuild
        self.add_results_section(
            result_dict=r,
            ensemble_name=ensemble_name,
            program_name='ARPWARP SHELXE Trace Rebuild',
            logfile_key='SXRARP_logfile',
            pdb_key='SXRARP_pdbout',
            mtz_key='SXRARP_mtzout',
            uid=uid,
            container_id=container_id,
        )
        pyrvapi.rvapi_set_tree_node(results_tree, container_id,
                                    "{0}".format(ensemble_name), "auto", "")
    return
def test_topfiles(self):
    """Third topFiles() entry from the reference pickle is the SHELXE trace of an MR result."""
    pkl_path = os.path.join(self.testfiles_dir, AMPLE_PKL)
    top = mrbump_util.ResultsSummary(results_pkl=pkl_path).topFiles()
    self.assertEqual(len(top), 3)
    self.assertEqual(top[2]['info'], 'SHELXE trace of MR result')