def convertTifs(pathDir):
    allTifs = locate("*c*.tif", pathDir)
    for f1 in allTifs:
        f2 = f1[:-4] + '.jpg'
        cmd = "convert " + f1 + " " + f2
        call(cmd.split(), shell=False)
    return True
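# locate() and call() are used throughout this module but are not defined in
# this section.  Below is a minimal sketch of a locate() helper, assuming it
# does a recursive match with fnmatch over os.walk and returns a list (the
# __init__ further down calls .sort() on its result); the project's actual
# helper may differ.  call() is assumed to be subprocess.call, and the
# "convert" command in convertTifs() assumes ImageMagick is on the PATH.
import fnmatch
import os


def locate(pattern, root=os.curdir):
    """Return a list of file paths under root whose names match pattern."""
    matches = []
    for dirpath, dirnames, filenames in os.walk(os.path.abspath(root)):
        for filename in fnmatch.filter(filenames, pattern):
            matches.append(os.path.join(dirpath, filename))
    return matches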
def __init__(self, exptDir):
    self.exptDir = exptDir
    self.dateStamp = os.path.split(self.exptDir)[1]
    self.allcsv = locate("*_EXPT_RESULTS.csv", self.exptDir)
    self.allcsv.sort()
    self.destDir = exptDir
    self.outFullFile, self.outShortFile = self.makeOutFiles()
    self.nameCircInt_dict = collections.defaultdict(list)
    self.nameRingInt_dict = collections.defaultdict(list)
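# This __init__ belongs to a results-summary class whose name and remaining
# methods (e.g. makeOutFiles) are not shown in this section.  The two
# collections.defaultdict(list) accumulators allow per-name intensity values
# to be appended without first checking for the key.  A small illustration,
# with hypothetical names and values (shown as comments so this fragment stays
# importable):
#
#   nameCircInt_dict = collections.defaultdict(list)
#   for name, intensity in [('fld001', 1023.5), ('fld001', 998.0), ('fld002', 311.2)]:
#       nameCircInt_dict[name].append(intensity)
#   # nameCircInt_dict -> {'fld001': [1023.5, 998.0], 'fld002': [311.2]}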
def drawFigures(pathDir):
    pklDir = os.path.join(os.path.split(pathDir)[0], 'pklFiles')
    pklFiles = locate('*.pkl', pklDir)
    #combineExptPlots(pathDir,pklDir,radPlotDir)
    for pklF in pklFiles:
        print pklF
        # The experiment type is the pkl file's basename up to the first '.'
        exptType = os.path.split(pklF)[1].split('.')[0]
        plotType1 = RadialPlotFigure(pathDir, exptType)
        fnameTag1 = 'radProfile.stdev'
        plotType1.drawRadialProfile('.meanLineProfile.dict.pkl', fnameTag1)
        plotType2 = RadialPlotFigure(pathDir, exptType)
        fnameTag2 = 'radProfile.reduced.stdev'
        plotType2.drawRadialProfile('.meanLineProfile.reduced.stdev.dict.pkl', fnameTag2)
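# Usage sketch for drawFigures(), following the directory layout implied by
# the code above: the pickled mean-line-profile dictionaries are expected in a
# 'pklFiles' directory that is a sibling of the directory passed in.  The path
# below is hypothetical.
#
#   drawFigures('/data/experiments/2014_05_01/radialPlots')
#   # reads /data/experiments/2014_05_01/pklFiles/*.pkl and draws two radial
#   # profile figures (full and reduced stdev) per experiment type.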
def summarize_fld(dateStamp, pathDir, destDir, typeVal='count'):
    """
    Summary of flds; this is typically the count density per field.
    """
    def _skip(f):
        # Files whose names contain any of these words are intermediate
        # products and are excluded from the summary.
        skip = ['offset', 'maxProj', 'proj', 'dict', 'tflds']
        for word in skip:
            if word in f:
                return True
        return False

    def _checkChannel(f):
        ch = 'default'
        if '561' in f:
            ch = '561'
        if '647' in f:
            ch = '647'
        if '561_647' in f:
            ch = '561_647'
        if 'c1.tif' in f:
            ch = '561'  # Must take precautions
        if 'c2.tif' in f:
            ch = '647'
        if 'c3.tif' in f:
            ch = '561_647'
        return ch

    def _summary(ch, listToWrite, ofile):
        ofile.write('\n CHANNEL \t ' + ch + '\n')
        for l in listToWrite:
            ofile.write('\t'.join(l) + '\n')
        return True

    if typeVal == 'count':
        out_long = os.path.join(destDir, dateStamp + '_flds.long.count.summary.csv')
        out_short = os.path.join(destDir, dateStamp + '_flds.short.count.summary.csv')
    if typeVal == 'intensity':
        out_long = os.path.join(destDir, dateStamp + '_flds.intensity.long.summary.csv')
        out_short = os.path.join(destDir, dateStamp + '_flds.intensity.short.summary.csv')

    ofile_long = open(out_long, 'w')
    if typeVal == 'count':
        header = '\t'.join(['FULL PATH', 'SUBDIR', 'CH', 'PEAK COUNT'])
    if typeVal == 'intensity':
        header = '\t'.join(['FULL PATH', 'SUBDIR', 'CH', 'MEAN PEAK INTENSITY'])
    ofile_long.write(header + '\n')

    ofile_short = open(out_short, 'w')
    if typeVal == 'count':
        header = '\t'.join(['FULL PATH', 'SUBDIR', 'CH', 'AVERAGE PEAK COUNT', 'STDEV PEAK COUNT'])
    if typeVal == 'intensity':
        header = '\t'.join(['FULL PATH', 'SUBDIR', 'CH', 'AVERAGE of MEAN PEAK INTENSITY', 'STDEV MEAN INTENSITY'])
    ofile_short.write(header + '\n')

    list_561 = list()
    list_647 = list()
    list_561_647 = list()
    list_default = list()
    dict_561 = collections.defaultdict(list)
    dict_647 = collections.defaultdict(list)
    dict_561_647 = collections.defaultdict(list)
    dict_default = collections.defaultdict(list)

    pattern = "*fld*.tif.png*.pkl"
    allfPkl = locate(pattern, pathDir)
    fList = [f for f in allfPkl if not _skip(f)]
    for f in fList:
        print "Loading file : %s" % (os.path.split(f)[1])
        with open(f) as ifile:
            peakInfo = pickle.load(ifile)
        if typeVal == 'count':
            colVal = len(peakInfo)  # Just this for now. Simples
        if typeVal == 'intensity':
            colVal = getMeanIntensity(peakInfo)
        exptPath, fname = os.path.split(f)
        expt = os.path.split(exptPath)[1]
        ch = _checkChannel(fname)
        if ch == '561':
            list_561.append([f, expt, ch, str(colVal)])
            dict_561[expt].append([exptPath, expt, colVal])
        elif ch == '647':
            list_647.append([f, expt, ch, str(colVal)])
            dict_647[expt].append([exptPath, expt, colVal])
        elif ch == '561_647':
            list_561_647.append([f, expt, ch, str(colVal)])
            dict_561_647[expt].append([exptPath, expt, colVal])
        elif ch == 'default':
            list_default.append([f, expt, ch, str(colVal)])
            dict_default[expt].append([exptPath, expt, colVal])
        else:
            raise SystemExit("Something wrong with Channels")

    _summary('561', list_561, ofile_long)
    _summary('647', list_647, ofile_long)
    _summary('561_647', list_561_647, ofile_long)
    _summary('default', list_default, ofile_long)
    ofile_long.close()

    # Short summary: per-experiment mean and stdev of the per-field values.
    list_ch = [list(), list(), list()]
    for idx, d in enumerate([dict_561, dict_647, dict_561_647]):
        for expt, val in d.items():
            exptPath = val[0][0]
            colValL = zip(*val)[2]
            avgPeaks, stdPeaks = np.mean(colValL), np.std(colValL)
            list_ch[idx].append([exptPath, expt, str(idx + 1), str(avgPeaks), str(stdPeaks)])
    _summary('561', list_ch[0], ofile_short)
    _summary('647', list_ch[1], ofile_short)
    _summary('561_647', list_ch[2], ofile_short)
    ofile_short.close()
    return True
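# Usage sketch for summarize_fld(), assuming the per-field peak-info pickles
# already sit under pathDir.  The date stamp and paths below are hypothetical;
# getMeanIntensity() is a project helper not shown in this section, assumed to
# average the per-peak intensities in a loaded pickle.
#
#   summarize_fld('2014_05_01', '/data/experiments/2014_05_01/rawImages',
#                 '/data/experiments/2014_05_01/summaries', typeVal='count')
#   summarize_fld('2014_05_01', '/data/experiments/2014_05_01/rawImages',
#                 '/data/experiments/2014_05_01/summaries', typeVal='intensity')
#
# Each call writes a long (per-field) and a short (per-experiment mean/stdev)
# tab-separated summary into destDir, split by channel (561, 647, 561_647).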