def __init__(self, root, conf_param, activ_code):
    """Set up a GDB-type handler on top of the shared CommonFunctions state.

    Parameters
    ----------
    root : string
        the folder path the handler operates on
    conf_param : Dictionary
        the config file
    activ_code : activation code forwarded from the caller
    """
    # Let the base class wire up the shared behaviour first.
    CommonFunctions.__init__(self, root, conf_param, activ_code)
    # Handler-specific state.
    self.type = "GDB"
    self.root = root
    self.conf_param = conf_param
    self.activ_code = activ_code
    self.ds = ""
    self.logFile = logFile()
def __init__(self, root, conf_param, activ_code):
    """Set up a report handler on top of the shared CommonFunctions state.

    Parameters
    ----------
    root : string
        the folder path the handler operates on
    conf_param : Dictionary
        the config file; must contain conf_param["logsText"]["report"]
    activ_code : activation code forwarded from the caller
    """
    # Let the base class wire up the shared behaviour first.
    CommonFunctions.__init__(self, root, conf_param, activ_code)
    # Handler-specific state.
    self.root = root
    self.conf_param = conf_param
    self.activ_code = activ_code
    # log-message templates for the report section of the config
    self.logs_text = conf_param["logsText"]["report"]
    self.ds = ""
    self.logFile = logFile()
def __init__(self, where, inputFile, jobName):
    """Prepare the job: output directory, run log and derived file paths.

    Parameters
    ----------
    where : string
        directory into which all output files are written
    inputFile : string
        path to the plain-text input file for the job
    jobName : string
        label used to prefix every generated file name
    """
    # specify where output files should be written
    self.outputDir = where
    self.makeOutputDir()
    self.txtInputFile = inputFile
    self.jobName = jobName
    self.runLog = logFile('{}/{}_runLog_1.txt'.format(self.outputDir, jobName))
    # specify output files for parts of pipeline;
    # the CAD output doubles as the SCALEIT input
    self.CADoutputMtz = '{}/{}_CADcombined.mtz'.format(self.outputDir, self.jobName)
    self.SCALEITinputMtz = self.CADoutputMtz
    self.SCALEIToutputMtz = '{}/{}_SCALEITcombined.mtz'.format(self.outputDir, self.jobName)
def __init__(self, root, conf_param):
    """Initialise the object with its working folder and configuration.

    Parameters
    ----------
    conf_param : Dictionary
        the config file
    root : string
        the folder path
    """
    self.logFile = logFile()
    self.file = ""
    self.root = root
    self.conf_param = conf_param
def __init__(self, root, conf_param, activ_cod):
    """Initialise the object with its folder, configuration and activation code.

    Parameters
    ----------
    conf_param : Dictionary
        the config file
    root : string
        the folder path
    activ_cod : activation code forwarded from the caller
    """
    self.logFile = logFile()
    # placeholders filled in later by the processing methods
    self.file = ""
    self.driver = ''
    self.ds = None
    self.root = root
    self.conf_param = conf_param
    self.activ_cod = activ_cod
    # assume inputs follow the expected naming convention until proven otherwise
    self.nameConvention = True
def startLogFile(self):
    """Create a uniquely-numbered log file for the current job.

    Parses the input file, checks the output directory exists, then picks
    the first file name of the form '<outputDir>RIDLjob_<i>.log' that does
    not already exist and opens a logFile there, stored on self.logFile.
    """
    # create log file for the current job
    self.quickParseInputFile()
    self.checkOutputDirExists(printToScreen=self.printOutput)
    name = '{}RIDLjob.log'.format(self.outputDir)

    # PEP8 E731: use a named helper instead of an assigned lambda
    def numberedLogName(x):
        # e.g. '<outputDir>RIDLjob_3.log'
        return name.replace('.log', '_{}{}'.format(x, '.log'))

    i = 1
    while os.path.isfile(numberedLogName(i)):
        i += 1
    # reuse the final candidate rather than recomputing it
    self.logFile = logFile(fileName=numberedLogName(i),
                           fileDir=self.outputDir,
                           printToScreen=self.printOutput)
def __init__(self, outputDir="", inputFile="", jobName="untitled-job",
             doScaling=True, log=""):
    """Prepare the job directory, locate existing files and open a run log.

    Parameters
    ----------
    outputDir : string
        directory into which all output files are written
    inputFile : string
        path to the plain-text input file for the job
    jobName : string
        label used to prefix every generated file name
    doScaling : bool
        whether the scaling step should be run
    log : existing log object to reuse; "" means open a fresh one
    """
    # specify where output files should be written
    self.outputDir = outputDir
    self.makeOutputDir(dirName=self.outputDir)
    self.findFilesInDir()
    self.txtInputFile = inputFile
    self.jobName = jobName
    self.doScaling = doScaling
    # adopt the caller-supplied log, otherwise open a fresh one
    if log != "":
        self.runLog = log
    else:
        logPath = "{}{}_runLog1.log".format(self.outputDir, jobName)
        self.runLog = logFile(fileName=logPath, fileDir=self.outputDir)
    # specify output files for parts of pipeline;
    # the CAD output doubles as the SCALEIT input
    self.CADoutputMtz = "{}{}_CADcombined.mtz".format(self.outputDir, self.jobName)
    self.SCALEITinputMtz = self.CADoutputMtz
    self.SCALEIToutputMtz = "{}{}_SCALEITcombined.mtz".format(self.outputDir, self.jobName)
def __init__(self, outputDir='', inputFile='', jobName='untitled-job',
             log='', useUnscaledMtz=False):
    """Record job inputs, find existing files and open a run log.

    Parameters
    ----------
    outputDir : string
        directory into which all output files are written
    inputFile : string
        path to the input file for the job
    jobName : string
        label used to prefix generated file names
    log : existing log object to reuse; '' means open a fresh one
    useUnscaledMtz : bool
        use non-scaleit scaled Fobs(n) for density map calculations
        (retrieved directly from CAD instead)
    """
    self.outputDir = outputDir
    self.inputFile = inputFile
    self.jobName = jobName
    # find files initially in working dir
    self.findFilesInDir()
    # create log file (or adopt the one handed in)
    if log != '':
        self.runLog = log
    else:
        logPath = '{}{}_runLog1.log'.format(self.outputDir, jobName)
        self.runLog = logFile(fileName=logPath, fileDir=self.outputDir)
    # provide option to use non-scaleit scaled Fobs(n) for
    # density map calculations (retrieved directly from CAD instead)
    self.useUnscaledMtz = useUnscaledMtz
def __init__(self, outputDir='', log='', densMapNaming='', atomMapNaming='',
             mtzIn1='./untitled.mtz', Mtz1LabelName='FP',
             Mtz1SIGFPlabel='SIGFP', RfreeFlag1='FREE', Mtz1LabelRename='D1',
             mtzIn2='./untitled2.mtz', Mtz2LabelName='FP',
             Mtz2SIGFPlabel='SIGFP', Mtz2LabelRename='D2',
             mtzIn3='./untitled3.mtz', Mtz3phaseLabel='PHIC',
             Mtz3FcalcLabel='FC', ignoreSIGFs=False, Mtz3LabelRename='DP',
             inputPDBfile='./untitled.pdb', densMapType='DIFF',
             scaleType='anisotropic', deleteIntermediateFiles=True,
             FOMweight='none', sfall_VDWR=1, mapResLimits=',',
             includeFCmaps=True, useLaterCellDims=True, sfallGRIDdims=[],
             mapAxisOrder=[], firstTimeRun=True, premadeAtomMap='',
             spaceGroup='', gridSampBeforeCropping=[], cropToModel=False):
    """Collect all map-generation job parameters and derive output paths.

    Stores every parameter on self, validates incompatible option
    combinations (reporting them through the run log), and builds the
    file names for the CAD/SCALEIT/PDBCUR/SFALL outputs.

    NOTE(review): sfallGRIDdims/mapAxisOrder/gridSampBeforeCropping use
    mutable list defaults; they are only assigned here, never mutated,
    so the shared-default hazard is latent — left unchanged to preserve
    the signature.
    """
    # specify where output files should be written
    self.outputDir = outputDir
    self.makeOutputDir(dirName=self.outputDir)
    self.findFilesInDir()

    # BUGFIX: create the run log *before* the validation checks below.
    # They pass log=self.runLog to error(), but self.runLog was previously
    # only assigned near the end of __init__, so a failed validation
    # raised AttributeError instead of reporting the real problem.
    if log == '':
        f = '{}{}_runLog1.log'.format(
            self.outputDir+'RIDL-log', densMapNaming)
        self.runLog = logFile(fileName=f, fileDir=self.outputDir)
    else:
        self.runLog = log

    self.densMapNaming = densMapNaming
    # atom maps fall back to the density-map naming scheme
    if atomMapNaming == '':
        atomMapNaming = densMapNaming
    self.atomMapNaming = atomMapNaming
    self.scaleType = scaleType
    # first input mtz and its column labels
    self.mtzIn1 = mtzIn1
    self.Mtz1LabelName = Mtz1LabelName
    self.RfreeFlag1 = RfreeFlag1
    self.Mtz1LabelRename = Mtz1LabelRename
    self.Mtz1SIGFPlabel = Mtz1SIGFPlabel
    # second input mtz and its column labels
    self.mtzIn2 = mtzIn2
    self.Mtz2LabelName = Mtz2LabelName
    self.Mtz2LabelRename = Mtz2LabelRename
    self.Mtz2SIGFPlabel = Mtz2SIGFPlabel

    # Provide option to ignore SIGFs during map calculations.
    # Must ensure scaling set to NONE if this chosen
    self.ignoreSIGFs = ignoreSIGFs
    if ignoreSIGFs:
        if scaleType.lower() != 'none':
            error(text='Cross-dataset scaling specified, but user has ' +
                       'specified to ignore all SIGF terms. Incompatible!',
                  log=self.runLog, type='error')
    if densMapType == 'HIGHONLY':
        if scaleType.lower() != 'none':
            error(text='Cross-dataset scaling specified, but user has ' +
                       'specified densMapType as "HIGHONLY". Incompatible!',
                  log=self.runLog, type='error')

    # third input mtz (calculated phases/structure factors)
    self.mtzIn3 = mtzIn3
    self.Mtz3phaseLabel = Mtz3phaseLabel
    self.Mtz3FcalcLabel = Mtz3FcalcLabel
    self.Mtz3LabelRename = Mtz3LabelRename
    self.inputPDBfile = inputPDBfile
    self.densMapType = densMapType
    self.deleteIntermediateFiles = deleteIntermediateFiles
    self.FOMweight = FOMweight
    self.sfall_VDWR = sfall_VDWR
    self.mapResLimits = mapResLimits
    self.includeFCmaps = includeFCmaps
    self.useLaterCellDims = useLaterCellDims
    self.sfallGRIDdims = sfallGRIDdims
    self.firstTimeRun = firstTimeRun
    self.spaceGroup = spaceGroup
    self.axes = mapAxisOrder
    self.gridSamps = gridSampBeforeCropping
    # usually not crop to model, but crop to asym unit
    # to get a smaller output map file
    self.cropToModel = cropToModel
    self.findFilesInDir()

    # derived output file paths
    self.CADoutputMtz = '{}{}_CADcombined.mtz'.format(
        self.outputDir, densMapNaming)
    self.SCALEIToutputMtz = '{}{}_SCALEITcombined.mtz'.format(
        self.outputDir, densMapNaming)
    self.PDBCURoutputFile = '{}{}_PDBCUR.pdb'.format(
        self.outputDir, atomMapNaming)
    self.reorderedPDBFile = '{}{}_curated.pdb'.format(
        self.outputDir, atomMapNaming)
    if premadeAtomMap == '':
        self.atomTaggedMap = '{}{}_SFALL.map'.format(
            self.outputDir, atomMapNaming)
    else:
        self.atomTaggedMap = premadeAtomMap
def __init__(self, root, conf_param, activ_code):
    """Store the working folder, configuration and activation code.

    Parameters
    ----------
    root : string
        the folder path
    conf_param : Dictionary
        the config file
    activ_code : activation code forwarded from the caller
    """
    self.logFile = logFile()
    self.root = root
    self.conf_param = conf_param
    self.activ_code = activ_code
# NOTE(review): orphan fragment — this span begins with an `elif` (its `if`
# is not visible here) and contains an `except` whose `try` is also not
# visible, followed by what appears to be the module's script tail
# (`logFile = logFile()` then `main(sys.argv[1], logFile)`). The enclosing
# definition is outside this view, so the code is left byte-identical.
# What the visible text shows: for a matching `lyr`/`report` directory it
# walks the tree, looks up a layer class by name from
# attri["VectorFormats"][...]["className"] via globals(), instantiates it
# and calls checkextension(files); on exception it appends the message and
# elapsed time to logs_text and writes both through logFile.writelogs.
# Rebinding `logFile = logFile()` at module level shadows the class with an
# instance — presumably intentional here, but verify against the caller.
elif lyr in dirs: root2 = os.path.join(path, lyr) for root3, dirs_lyr, files in os.walk(root2): VectorLayer = globals()[attri["VectorFormats"]["symbology"]["className"]] vectortype = VectorLayer(root3,attri,activation_code) vectortype.checkextension(files) elif report in dirs: root2 = os.path.join(path, report) for root3, dirs_lyr, files in os.walk(root2): VectorLayer = globals()[attri["VectorFormats"]["report"]["className"]] vectortype = VectorLayer(root3,attri,activation_code) vectortype.checkextension(files) time = logs_text["time"] #shutil.rmtree(attri["Temp_root"]) time.append(str(datetime.now() - startTime)) #logFile.writelogs(time) #print("FINISH") except Exception as ex: e = logs_text["e"] e.append(str(ex)) logFile.writelogs(e) time = logs_text["time"] time.append(str(datetime.now() - startTime)) logFile.writelogs(time) logFile = logFile() main(sys.argv[1],logFile)
def runPipeline(self):
    """Run the map-generation pipeline end to end.

    Steps: read the input file, open a run log, run PDBCUR, renumber the
    PDB file, determine the space group, run SFALL, compute a density map
    (FFT, or END when densMapType == 'END'), crop both maps to the
    asymmetric unit and to matching grids with MAPMASK, then check the
    two map headers agree.

    Returns 0 on success, or a step-specific failure code:
    1 readInputFile, 2 PDBCUR, 3 getSpaceGroup, 4 SFALL, 5 FFT/END,
    6 atom-map crop, 7.0 END axis switch, 7.1 density-map crop,
    8 map-to-map crop, 9 consistency check.
    """
    # read input file first
    success = self.readInputFile()
    if success is False:
        return 1
    # create log file
    self.runLog = logFile('{}/{}_runLog_2.txt'.format(self.outputDir,self.jobName))
    # run pdbcur job
    pdbcur = PDBCURjob(self.pdbcurPDBinputFile,self.outputDir,self.runLog)
    success = pdbcur.run()
    if success is False:
        return 2
    self.PDBCURoutputFile = pdbcur.outputPDBfile
    # reorder atoms in PDB file
    self.renumberPDBFile()
    # get space group from PDB file
    success = self.getSpaceGroup()
    if success is False:
        return 3
    # run SFALL job to make the atom-tagged map
    sfall = SFALLjob(self.reorderedPDBFile,self.outputDir,self.sfall_VDWR,
                     self.spaceGroup,self.sfall_GRID,'ATMMOD',self.runLog)
    success = sfall.run()
    if success is False:
        return 4
    # run FFT job; axis order, grid sampling and mtz column labels are
    # taken from the SFALL map header so both maps line up
    sfallMap = mapTools(sfall.outputMapFile)
    axes = [sfallMap.fastaxis,sfallMap.medaxis,sfallMap.slowaxis]
    gridSamps = [sfallMap.gridsamp1,sfallMap.gridsamp2,sfallMap.gridsamp3]
    labelsInit = ['FP_'+self.initPDB,'SIGFP_'+self.initPDB,
                  'FOM_'+self.initPDB,'PHIC_'+self.initPDB]
    labelsLater = ['FP_'+self.laterPDB,'SIGFP_'+self.laterPDB,
                   'FOM_'+self.laterPDB,'PHIC_'+self.laterPDB]
    if self.densMapType != 'END':
        fft = FFTjob(self.densMapType,self.FOMweight,self.reorderedPDBFile,
                     self.inputMtzFile,self.outputDir,axes,gridSamps,
                     labelsLater,labelsInit,self.runLog)
        success = fft.run()
    else:
        # run END job if required (may take time to run!!)
        # END input files share the PDB file's base name
        endInputPDB = self.pdbcurPDBinputFile
        endInputMTZ = ''.join(endInputPDB.split('.')[:-1]+['.mtz'])
        endInputEFF = ''.join(endInputPDB.split('.')[:-1]+['.eff'])
        end = ENDjob(endInputPDB,endInputMTZ,endInputEFF,
                     self.outputDir,gridSamps,self.runLog)
        success = end.run()
    if success is False:
        return 5
    # crop fft and atom-tagged maps to asymmetric unit:
    mapmask1 = MAPMASKjob(sfall.outputMapFile,'',self.outputDir,self.runLog)
    success = mapmask1.crop2AsymUnit()
    if success is False:
        return 6
    # choose correct density map to include in MAPMASK cropping below
    if self.densMapType != 'END':
        inputDensMap = fft.outputMapFile
    else:
        inputDensMap = end.outputMapFile
    # switch END map axes to match SFALL atom-tagged map if required
    if self.densMapType == 'END':
        mapmask_ENDmap = MAPMASKjob(inputDensMap,'',self.outputDir,self.runLog)
        success = mapmask_ENDmap.switchAxisOrder(axes,self.spaceGroup)
        if success is False:
            return 7.0
        else:
            inputDensMap = mapmask_ENDmap.outputMapFile
    # run MAPMASK job to crop fft density map to asym unit
    mapmask2 = MAPMASKjob(inputDensMap,'',self.outputDir,self.runLog)
    success = mapmask2.crop2AsymUnit()
    if success is False:
        return 7.1
    # run MAPMASK job to crop fft density map to same grid
    # sampling dimensions as SFALL atom map
    mapmask3 = MAPMASKjob(mapmask2.outputMapFile,mapmask1.outputMapFile,
                          self.outputDir,self.runLog)
    success = mapmask3.cropMap2Map()
    if success is False:
        return 8
    # perform map consistency check between cropped fft and sfall maps
    fftMap = mapTools(mapmask3.outputMapFile)
    fftMap.readHeader()
    sfallMap = mapTools(mapmask1.outputMapFile)
    sfallMap.readHeader()
    success = self.mapConsistencyCheck(sfallMap,fftMap)
    if success is False:
        return 9
    else:
        return 0