def __exit__(self, type, value, traceback):
    self.time2 = time.time()
    self.clock2 = time.clock()
    self.mem2 = memory.current()
    self.memmax = memory.max()
    # Report elapsed wall time [s] and current/peak memory [GB] for this block.
    print("%5.2f %5.2f %5.2f %s" % (self.time2 - self.time1, self.mem2 * 1e-9, self.memmax * 1e-9, self.name))

def filter(self, record):
    # Inject MPI rank, elapsed wall time and memory usage [GB] into every log record.
    record.rank = self.rank
    record.wtime = time.time() - self.t0
    record.wmins = record.wtime / 60.
    record.whours = record.wmins / 60.
    record.mem = memory.current() / 1024.**3
    record.resmem = memory.resident() / 1024.**3
    record.memmax = memory.max() / 1024.**3
    return record

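# The filter above assumes it is installed on a Python logging logger or handler,
# so that the injected fields (rank, wtime, wmins, whours, mem, resmem, memmax)
# become available to the formatter. Below is a minimal, runnable sketch of that
# wiring. The class name ExampleFilter, the fixed rank argument and the omission
# of the memory fields are illustrative assumptions, not the codebase's actual setup.
import logging
import time

class ExampleFilter(logging.Filter):
    def __init__(self, rank=0):
        logging.Filter.__init__(self)
        self.rank = rank
        self.t0 = time.time()
    def filter(self, record):
        # Attach extra attributes; returning True keeps the record.
        record.rank = self.rank
        record.wtime = time.time() - self.t0
        record.wmins = record.wtime / 60.
        return True

log = logging.getLogger("example")
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("%(rank)3d %(wmins)7.2f %(message)s"))
log.addHandler(handler)
log.addFilter(ExampleFilter(rank=0))
log.setLevel(logging.INFO)
log.info("filter fields are now available to the formatter")
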
def __exit__(self, type, value, traceback):
    sys.stderr.write("%6.2f %6.3f %6.3f %s\n" % (
        time.time() - self.t1, memory.current() / 1024.**3,
        memory.max() / 1024.**3, self.desc))

def __enter__(self):
    # Record wall time, CPU time and memory use at the start of the block.
    self.time1 = time.time()
    self.clock1 = time.clock()
    self.mem1 = memory.current()

def __exit__(self, type, value, traceback):
    # Record the same quantities at the end and accumulate the differences.
    self.time2 = time.time()
    self.clock2 = time.clock()
    self.mem2 = memory.current()
    stats.add(self.name, self.time2 - self.time1, self.clock2 - self.clock1, self.mem1, self.mem2 - self.mem1)

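# For context, a minimal, runnable sketch of the context-manager pattern that the
# __enter__/__exit__ pairs above implement: timing a block of code with a `with`
# statement. The class name Bench, and the use of time.perf_counter/process_time
# in place of the codebase's memory module and of time.clock (removed in Python 3.8),
# are illustrative assumptions rather than the actual implementation.
import time

class Bench:
    def __init__(self, name):
        self.name = name
    def __enter__(self):
        self.time1 = time.perf_counter()    # wall-clock time at block entry
        self.clock1 = time.process_time()   # CPU time at block entry
        return self
    def __exit__(self, type, value, traceback):
        self.time2 = time.perf_counter()
        self.clock2 = time.process_time()
        print("%6.2f %6.2f %s" % (self.time2 - self.time1, self.clock2 - self.clock1, self.name))

with Bench("sum of first million integers"):
    total = sum(range(10**6))
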
            continue
        visible = np.any(object_pos[1] >= -margin)
        if not visible:
            cut = sampcut.empty(d.ndet, d.nsamp)
        else:
            pmap = pmat.PmatMap(scan, mask, sys="sidelobe:%s" % args.objname)
            # Build a tod to project onto.
            tod = np.zeros((d.ndet, d.nsamp), dtype=dtype)
            # And project
            pmap.forward(tod, mask)
            # Any nonzero samples should be cut
            tod = tod != 0
            cut = sampcut.from_mask(tod)
            del tod
        progress = 100.0 * (ind - comm.rank * ntod // comm.size) / ((comm.rank + 1) * ntod // comm.size - comm.rank * ntod // comm.size)
        print("%3d %5.1f %s %6.4f %d %8.3f %8.3f" % (comm.rank, progress, id, float(cut.sum()) / cut.size, visible, memory.current() / 1024.**3, memory.max() / 1024.**3))
        mystats.append([ind, float(cut.sum()) / cut.size, visible])
        # Add to my work file
        _, uids = actdata.split_detname(d.dets)
        flags = flagrange.from_sampcut(cut, dets=uids)
        flags.write(hfile, group=id)

# Merge all the individual cut files into a single big one.
comm.Barrier()
if comm.rank == 0:
    with h5py.File(args.odir + "/cuts.hdf", "w") as ofile:
        for i in range(comm.size):
            print("Reducing %3d" % i)
            with h5py.File(args.odir + "/work_%03d.hdf" % i, "r") as ifile:
                for key in sorted(ifile.keys()):
                    ifile.copy(key, ofile)
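# An illustrative, self-contained sketch of the bookkeeping that sampcut.from_mask
# performs in the snippet above: turning a boolean (ndet, nsamp) cut mask into
# contiguous [start, end) sample ranges per detector. This is not the enlib
# implementation, only the underlying idea; mask_to_ranges is a hypothetical helper.
import numpy as np

def mask_to_ranges(mask):
    """For each detector row, return the (start, end) index pairs covering the
    runs of True samples in that row (end is exclusive)."""
    ranges = []
    for row in mask:
        padded = np.concatenate([[False], row, [False]]).astype(np.int8)
        edges = np.flatnonzero(np.diff(padded))
        starts = edges[0::2].tolist()
        ends = edges[1::2].tolist()
        ranges.append(list(zip(starts, ends)))
    return ranges

mask = np.array([[0, 1, 1, 0, 1],
                 [1, 1, 0, 0, 0]], dtype=bool)
print(mask_to_ranges(mask))  # [[(1, 3), (4, 5)], [(0, 2)]]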