def RecombineIndependentTLSSegments(analysis):
    console.endln()
    console.debug_stdoutln(">tlsmd_analysis->RecombineIndependentTLSSegments()")
    console.stdoutln("TLS SEGMENT RECOMBINATION")
    for chain in analysis.chains:
        ## E.g., chain="Segment(1:A, Res(ILE,16,A)...Res(SER,116,A))"
        cpartition_recombination.ChainPartitionRecombinationOptimization(chain)
def LoadStructure(struct_source):
    """Loads Structure, chooses a unique struct_id string. Also, searches the
    REMARK records for TLS group records. If they are found, then adds the
    TLS group ADP magnitude to the B factors of the ATOM records.
    """
    ## determine the argument type
    if isinstance(struct_source, str):
        file_path = struct_source
        console.kvformat("LOADING STRUCTURE", file_path)
        fobj = open(file_path, "r")
    elif hasattr(struct_source, "__iter__") and hasattr(struct_source, "seek"):
        console.kvformat("LOADING STRUCTURE", str(struct_source))
        fobj = struct_source
    else:
        raise ValueError

    ## load struct
    struct = FileIO.LoadStructure(file = fobj, distance_bonds = True)

    console.kvformat("HEADER", struct.header)
    console.kvformat("TITLE", struct.title)
    console.kvformat("EXPERIMENTAL METHOD", struct.experimental_method)

    ## set the structure ID
    if conf.globalconf.struct_id is not None:
        struct_id = conf.globalconf.struct_id
    else:
        struct_id = struct.structure_id
        conf.globalconf.struct_id = struct_id
    struct.structure_id = struct_id

    console.endln()

    ## if there are REFMAC5 TLS groups in the REMARK records of
    ## the PDB file, then add those in
    tls_file = TLS.TLSFile()
    tls_file.set_file_format(TLS.TLSFileFormatPDB())

    ## return to the beginning of the file and read the REMARK/TLS records
    fobj.seek(0)
    tls_file.load(fobj)

    if len(tls_file.tls_desc_list) > 0:
        console.stdoutln("ADDING TLS GROUP Bequiv TO ATOM TEMPERATURE FACTORS")
        console.stdoutln("    NUM TLS GROUPS: %d" % (len(tls_file.tls_desc_list)))

        ## assume REFMAC5 groups where Utotal = Utls + Biso(temp_factor)
        for tls_desc in tls_file.tls_desc_list:
            tls_group = tls_desc.construct_tls_group_with_atoms(struct)
            console.stdoutln("    TLS GROUP: %s" % (tls_group.name))
            for atm, Utls in tls_group.iter_atm_Utls():
                bresi = atm.temp_factor
                atm.temp_factor = bresi + (Constants.U2B * numpy.trace(Utls) / 3.0)
                atm.U = (Constants.B2U * bresi * numpy.identity(3, float)) + Utls

    console.endln()
    return struct
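## A minimal, self-contained sketch of the Biso/Uiso bookkeeping performed in
## LoadStructure() when the TLS Bequiv is folded into the ATOM temperature
## factors: Bequiv = U2B * trace(Utls) / 3 with U2B = 8*pi**2. The helper name
## and the literal constant below are assumptions standing in for Constants.U2B;
## the numbers are illustrative only.
def _bequiv_from_utls_sketch(biso_residual, Utls):
    """Return the residual Biso plus the TLS Bequiv contribution, in B-factor
    units (A**2)."""
    import math
    u2b = 8.0 * math.pi ** 2
    return biso_residual + u2b * numpy.trace(Utls) / 3.0

## e.g. _bequiv_from_utls_sketch(15.0, 0.05 * numpy.identity(3, float)) ~= 18.95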
def prnt(self):
    console.stdoutln("TLS Motion Determination (TLSMD) Version %s" % (const.VERSION))
    console.endln()
    console.kvformat("TLS PARAMETER FIT ENGINE", self.tls_model)
    console.kvformat("MIN_SUBSEGMENT_SIZE", self.min_subsegment_size)
    console.kvformat("ATOM B-FACTOR WEIGHT_MODEL", self.weight_model)
    console.kvformat("PROTEIN ATOMS CONSIDERED", self.include_atoms)
    console.endln()
def prnt_settings(self):
    chain_ids = []
    for chain in self.chains:
        chain_ids.append(chain.chain_id)
    cids = ",".join(chain_ids)
    console.debug_stdoutln(">tlsmd_analysis->TLSMDAnalysis()")
    console.kvformat("STRUCTURE ID", self.struct_id)
    console.kvformat("CHAIN IDs SELECTED FOR ANALYSIS", cids)
    console.endln()
def FitConstrainedTLSModel(analysis):
    """Calculates constrained TLS model for visualization.
    """
    console.endln()
    console.debug_stdoutln(">tlsmd_analysis->FitConstrainedTLSModel()")
    console.stdoutln("CALCULATING CONSTRAINED TLS MODEL FOR VISUALIZATION")

    ## EAM Feb 2008 User job was getting stuck in fit_to_chain()
    ## Obviously it would be nice to fix the actual error, but at least we
    ## would like to be able to give it a swift non-fatal kick by sending SIGUSR1
    signal.signal(signal.SIGUSR1, SIGUSR1_handler)

    ## Progress tracking
    ## - assume this portion of the run occupies 0.1 -> 0.5 of the total time
    progress = 0.1

    for chain in analysis.iter_chains():
        console.stdoutln("CHAIN %s" % (chain.chain_id))
        for cpartition in chain.partition_collection.iter_chain_partitions():
            ## cpartition.chain = "Segment(1:A, Res(MET,1,A)...Res(VAL,50,A))"
            console.stdoutln("TLS GROUPS: %d" % (cpartition.num_tls_segments()))
            for tls in cpartition.iter_tls_segments():
                try:
                    tls.fit_to_chain(cpartition.chain)
                    ## TODO: Write out data for residual plots.
                    #gp = gnuplots.LSQR_vs_TLS_Segments_Pre_Plot(cpartition.chain)
                    #console.stdoutln("FIT_TO_CHAIN_PATH: %s" % analysis.struct2_file_path)
                except (RuntimeError, numpy.linalg.linalg.LinAlgError), e:
                    msg  = "    Runtime error for [%s]: %s, " % (tls, e)
                    msg += "trying to continue..."
                    console.stdoutln(msg)
                    print console.formatExceptionInfo()

        ## Track progress
        progress += 0.4 / analysis.num_chains()
        progress_report = open("progress", "w+")
        print >> progress_report, progress
        ## progress_report.write(progress)
        progress_report.close()

    console.cpu_time_stdoutln("->FitConstrainedTLSModel: %s" % time.clock())
    console.endln()
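## A hedged sketch of the progress bookkeeping in FitConstrainedTLSModel():
## this stage is assumed to cover the 0.1 -> 0.5 span of the total run,
## advanced in equal per-chain steps. The generator name is illustrative only.
def _constrained_fit_progress_sketch(num_chains):
    """Yield the progress fraction reported after each chain is fit."""
    progress = 0.1
    for _ in range(num_chains):
        progress += 0.4 / num_chains
        yield progress

## e.g. list(_constrained_fit_progress_sketch(2)) -> approximately [0.3, 0.5]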
def IsotropicADPDataSmoother(chain, num_smooth = 1):
    """Experimental data smoothing of temperature factors.
    """
    console.endln()
    console.stdoutln("SMOOTHING CHAIN %s ADPs" % (chain.chain_id))
    console.kvformat("SMOOTH WINDOW", 2 * num_smooth + 1)

    num_frags = len(chain)
    smooth_uiso = dict()
    ifrag_start = num_smooth
    ifrag_end = num_frags - num_smooth - 1

    for ifrag in xrange(ifrag_start, ifrag_end + 1):
        smooth_frag = chain[ifrag]
        frag1 = chain[ifrag - num_smooth]
        frag2 = chain[ifrag + num_smooth]

        IT, IL, IS, IOrigin = IsotropicFitSegmentOutlierRejection(
            chain, frag1.fragment_id, frag2.fragment_id)

        for atm, uiso in TLS.iter_itls_uiso(smooth_frag.iter_all_atoms(),
                                            IT, IL, IS, IOrigin):
            smooth_uiso[atm] = uiso

        if ifrag == ifrag_start:
            for i in range(ifrag_start):
                smooth_frag = chain[i]
                for atm, uiso in TLS.iter_itls_uiso(smooth_frag.iter_all_atoms(),
                                                    IT, IL, IS, IOrigin):
                    smooth_uiso[atm] = uiso
        elif ifrag == ifrag_end:
            for i in range(ifrag_end + 1, num_frags):
                smooth_frag = chain[i]
                for atm, uiso in TLS.iter_itls_uiso(smooth_frag.iter_all_atoms(),
                                                    IT, IL, IS, IOrigin):
                    smooth_uiso[atm] = uiso

    for atm, uiso in smooth_uiso.iteritems():
        atm.temp_factor = Constants.U2B * uiso
        atm.U = numpy.identity(3, float) * uiso
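## A small sketch of the sliding-window bounds used by IsotropicADPDataSmoother():
## the window is 2*num_smooth + 1 fragments wide, only interior fragments get
## their own centered window, and the leading/trailing fragments reuse the
## first/last window's iTLS fit. The helper name is illustrative only.
def _smoothing_window_bounds_sketch(num_frags, num_smooth = 1):
    """Return (window_width, ifrag_start, ifrag_end) for the interior pass."""
    window_width = 2 * num_smooth + 1
    ifrag_start = num_smooth
    ifrag_end = num_frags - num_smooth - 1
    return window_width, ifrag_start, ifrag_end

## e.g. _smoothing_window_bounds_sketch(100, 1) == (3, 1, 98)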
def prnt_detailed_paths(self):
    """Prints out detailed information on the minimization of each vertex
    path for n segments with Node Label, Hops, Cost, Previous Node, and Edge.
    """
    hops = self.nparts
    if not self.minimized:
        return
    dest_j = len(self.V) - 1

    for h in xrange(1, hops + 1):
        console.endln()
        console.stdoutln("MINIMIZATION VERTEX PATH FOR %d SEGMENTS" % (h))
        header  = "NODE LABEL" + " "*14     # V: "V50[C-TERM]"
        header += "HOPS" + " "*6            # D: "1"
        header += "COST" + " "*6            # P: "0.0943"
        header += "PREVIOUS NODE" + " "*10  # T: "V0[N-TERM]"
        header += "EDGE"                    # h: "(0, 50, 0.094,('1', '50')) 0.002"
        console.stdoutln(header)
        self.__detailed_path(self.V, self.D, self.P, self.T, h)
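## A hedged sketch of the row layout implied by the header built above (the
## column widths 24/10/10/23 follow from the padding constants; __detailed_path()
## owns the real formatting, so this helper is illustrative only).
def _format_path_row_sketch(label, hops, cost, prev_node, edge):
    """Format one vertex-path row so it lines up under the printed header."""
    return "%-24s%-10s%-10s%-23s%s" % (label, hops, cost, prev_node, edge)

## e.g. _format_path_row_sketch("V50[C-TERM]", 1, 0.0943, "V0[N-TERM]",
##                              "(0, 50, 0.094,('1', '50')) 0.002")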
def IndependentTLSSegmentOptimization(analysis):
    """Performs the TLS graph minimization on all TLSGraphs.
    """
    for chain in analysis.chains:
        isopt = independent_segment_opt.ISOptimization(
            chain,
            conf.globalconf.min_subsegment_size,
            conf.globalconf.nparts)

        ## TODO: Divide this into two CPU times, 2009-12-10
        #console.stdoutln("CPU_TIME ->ISOptResidualGraph: %s" % time.clock())

        isopt.run_minimization()
        if not isopt.minimized:
            continue

        console.endln()
        console.stdoutln("="*79)
        console.debug_stdoutln(">tlsmd_analysis->IndependentTLSSegmentOptimization()")
        console.stdoutln("MINIMIZING CHAIN %s" % (chain))
        isopt.prnt_detailed_paths()

        chain.partition_collection = isopt.construct_partition_collection(conf.globalconf.nparts)
        chain.partition_collection.struct = analysis.struct
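## A hedged sketch of how the per-chain stages in this module are typically
## chained; "analysis" is assumed to be a TLSMD analysis object that already
## holds the loaded structure and selected chains, and the driver name below
## is hypothetical.
def _run_segmentation_pipeline_sketch(analysis):
    """Run optimization, recombination, and the constrained fit in order."""
    IndependentTLSSegmentOptimization(analysis)   # per-chain graph minimization
    RecombineIndependentTLSSegments(analysis)     # partition recombination
    FitConstrainedTLSModel(analysis)              # constrained model for visualization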
def SumperimposeHomologousStructure(analysis):
    """Superimposes the homologous target structure/chain onto each analysis
    chain and adds the conformation prediction to every chain partition.
    """
    import structcmp

    target_struct = FileIO.LoadStructure(fil = analysis.struct2_file_path)
    target_chain = target_struct.get_chain(analysis.struct2_chain_id)

    if target_chain is None:
        console.stderrln(
            "UNABLE TO LOAD TARGET STRUCTURE/CHAIN: %s:%s" % (
            target_struct, target_chain))
        return

    analysis.target_chain = target_chain

    for chain in analysis.iter_chains():
        console.endln()
        console.kvformat("Superimposing Chain", chain.chain_id)
        hyp = structcmp.TLSConformationPredctionHypothosis(chain, target_chain)
        for ntls, cpartition in chain.partition_collection.iter_ntls_chain_partitions():
            console.endln()
            console.stdoutln("Number of TLS Segments........: %d" % (ntls))
            hyp.add_conformation_prediction_to_chain_partition(cpartition)
msg = " Runtime error for [%s]: %s, " % ( tls, e) msg += "trying to continue..." console.stdoutln(msg) print console.formatExceptionInfo() pass ## Track progress progress += 0.4/analysis.num_chains() progress_report = open("progress","w+") print >> progress_report, progress ## progress_report.write(progress) progress_report.close() console.cpu_time_stdoutln("->FitConstrainedTLSModel: %s" % time.clock()) console.endln() def SuperimposeHomologousStructure(analysis): """ """ import structcmp target_struct = FileIO.LoadStructure(fil = analysis.struct2_file_path) target_chain = target_struct.get_chain(analysis.struct2_chain_id) if target_chain is None: console.stderrln( "UNABLE TO LOAD TARGET STRUCTURE/CHAIN: %s:%s" % ( target_struct, target_chain)) return