def __exit__(self, type, value, traceback):
	"""Leave the timing block: record end wall/CPU times and memory use,
	then print a one-line summary: "wall_s mem_GB maxmem_GB name"."""
	self.time2 = time.time()
	# Fix: time.clock() was deprecated in 3.3 and removed in Python 3.8;
	# time.perf_counter() is its documented replacement for interval timing.
	# NOTE(review): if __enter__ (not visible here) still sets self.clock1
	# via time.clock(), update it the same way so clock2-clock1 stays valid.
	self.clock2 = time.perf_counter()
	self.mem2 = memory.current()
	self.memmax = memory.max()
	# 1e-9 converts to GB — presumably memory.* returns bytes; TODO confirm.
	print("%5.2f %5.2f %5.2f %s" % (self.time2 - self.time1, self.mem2 * 1e-9, self.memmax * 1e-9, self.name))
def filter(self, record):
	"""Inject MPI rank, elapsed wall time and memory usage into the record.

	Used as a logging filter: it always returns the (truthy) record, so no
	messages are suppressed — it exists purely to add formatting fields.
	"""
	gb = 1024.**3
	wall = time.time() - self.t0
	mins = wall / 60.
	record.rank   = self.rank
	record.wtime  = wall
	record.wmins  = mins
	record.whours = mins / 60.
	# memory.* presumably reports bytes; divided down to GiB — TODO confirm.
	record.mem    = memory.current() / gb
	record.resmem = memory.resident() / gb
	record.memmax = memory.max() / gb
	return record
prefix = args.odir + "/" if args.cont and os.path.isfile(prefix + "catalogue.fits"): if verbosity >= 1: print "%3d skipping %3d %3d (already done)" % (comm.rank, y, x) continue if verbosity >= 1: print "%3d processing %3d %3d" % (comm.rank, y, x) sys.stdout.flush() t1 = time.time() tpos = np.array(tyx[i]) pbox = np.array([tpos*tshape,np.minimum((tpos+1)*tshape,shape[-2:])]) box = enmap.pix2sky(shape, wcs, pbox.T).T try: info = eval_tile(mapinfo, box, signals, verbosity=verbosity) output_tile(prefix, [y,x], info) #except (np.linalg.LinAlgError, MemoryError) as e: except Exception as e: print "%3d error while processing %3d %3d: '%s'. Skipping" % (comm.rank, y, x, e.message) continue t2 = time.time() if verbosity >= 1: print "%3d processed %3d %3d in %7.1f max-mem %7.3f" % (comm.rank, y, x, t2-t1, memory.max()/1024.**3) else: # Single arbitrary tile if not overlaps_any(bounds, boxes): if verbosity >= 1: print "No data in selected region" else: info = eval_tile(mapinfo, bounds, signals, verbosity=verbosity) output_tile(args.odir + "/", info)
def __exit__(self, type, value, traceback):
	"""On leaving the timed block, report elapsed wall time and memory use
	to stderr as "seconds current_GiB peak_GiB description"."""
	elapsed = time.time() - self.t1
	cur_gb  = memory.current() / 1024.**3
	peak_gb = memory.max() / 1024.**3
	sys.stderr.write("%6.2f %6.3f %6.3f %s\n" % (elapsed, cur_gb, peak_gb, self.desc))
def __exit__(self, type, value, traceback):
	# Context-manager exit: emit one stderr line with the wall time since
	# __enter__ set self.t1, plus current and peak memory (GiB) and the
	# block's description.
	stats = (
		time.time() - self.t1,
		memory.current() / 1024.**3,
		memory.max() / 1024.**3,
		self.desc,
	)
	sys.stderr.write("%6.2f %6.3f %6.3f %s\n" % stats)
prefix = args.odir + "/" if args.cont and os.path.isfile(prefix + "catalogue.fits"): if verbosity >= 1: print "%3d skipping %3d %3d (already done)" % (comm.rank, y, x) continue if verbosity >= 1: print "%3d processing %3d %3d" % (comm.rank, y, x) sys.stdout.flush() t1 = time.time() tpos = np.array(tyx[i]) pbox = np.array([tpos*tshape,np.minimum((tpos+1)*tshape,shape[-2:])]) box = enmap.pix2sky(shape, wcs, pbox.T).T try: info = eval_tile(mapinfo, box, signals, verbosity=verbosity) output_tile(prefix, [y,x], info) #except (np.linalg.LinAlgError, MemoryError) as e: except Exception as e: print "%3d error while processing %3d %3d: '%s'. Skipping" % (comm.rank, y, x, str(e)) continue t2 = time.time() if verbosity >= 1: print "%3d processed %3d %3d in %7.1f max-mem %7.3f" % (comm.rank, y, x, t2-t1, memory.max()/1024.**3) else: # Single arbitrary tile if not overlaps_any(bounds, boxes): if verbosity >= 1: print "No data in selected region" else: info = eval_tile(mapinfo, bounds, signals, verbosity=verbosity) output_tile(args.odir + "/", info)
# NOTE(review): this chunk starts inside a per-TOD loop — the leading `else`
# pairs with an `if not visible:` test above this chunk — and the merge
# section at the bottom runs after that loop ends. Dedent points have been
# reconstructed from the comments and MPI structure; confirm against the
# full file.
	else:
		# Pointing matrix into the sidelobe mask, in the object-centered
		# "sidelobe:<name>" coordinate system.
		pmap = pmat.PmatMap(scan, mask, sys="sidelobe:%s" % args.objname)
		# Build a tod to project onto.
		tod = np.zeros((d.ndet, d.nsamp), dtype=dtype)
		# And project
		pmap.forward(tod, mask)
		# Any nonzero samples should be cut
		tod = tod != 0
		cut = sampcut.from_mask(tod)
		del tod
	# Progress is this rank's position within its contiguous share of the
	# ntod work items, as a percentage.
	progress = 100.0 * (ind - comm.rank * ntod // comm.size) / ( (comm.rank + 1) * ntod // comm.size - comm.rank * ntod // comm.size)
	# Report: rank, progress%, tod id, cut fraction, visibility flag,
	# current and peak memory in GiB.
	print("%3d %5.1f %s %6.4f %d %8.3f %8.3f" % (comm.rank, progress, id, float(cut.sum()) / cut.size, visible, memory.current() / 1024.**3, memory.max() / 1024.**3))
	mystats.append([ind, float(cut.sum()) / cut.size, visible])
	# Add to my work file
	_, uids = actdata.split_detname(d.dets)
	flags = flagrange.from_sampcut(cut, dets=uids)
	flags.write(hfile, group=id)
# Merge all the individual cut files into a single big one.
# Rank 0 copies every group from each rank's work_NNN.hdf into cuts.hdf.
comm.Barrier()
if comm.rank == 0:
	with h5py.File(args.odir + "/cuts.hdf", "w") as ofile:
		for i in range(comm.size):
			print("Reducing %3d" % i)
			with h5py.File(args.odir + "/work_%03d.hdf" % i, "r") as ifile:
				for key in sorted(ifile.keys()):
					ifile.copy(key, ofile)
def write(self, desc, level, exact=False, newline=True, prepend=""):
	"""Emit desc to stderr if its verbosity level passes the filter.

	With exact=True only messages at exactly self.level are shown; otherwise
	any message at or below self.level passes. Each emitted line is prefixed
	with the peak memory use (GiB), the caller-supplied prepend string, and
	self.prefix; a trailing newline is added unless newline=False.
	"""
	passes = level == self.level if exact else level <= self.level
	if not passes:
		return
	header = "%6.2f " % (memory.max()/1024.**3) + prepend
	sys.stderr.write(header + self.prefix + desc + ("\n" if newline else ""))
# NOTE(review): this chunk begins mid-loop — the leading `continue` closes an
# unseen if-statement above it — and the merge section at the bottom runs
# after the per-TOD loop ends. Dedent points have been reconstructed from the
# comments and MPI structure; confirm against the full file.
		continue
	# Only build a cut if the object ever rises above -margin in coordinate 1
	# (presumably elevation — TODO confirm against object_pos's producer).
	visible = np.any(object_pos[1] >= -margin)
	if not visible:
		cut = sampcut.empty(d.ndet, d.nsamp)
	else:
		pmap = pmat.PmatMap(scan, mask, sys="sidelobe:%s" % args.objname)
		# Build a tod to project onto.
		tod = np.zeros((d.ndet, d.nsamp), dtype=dtype)
		# And project
		pmap.forward(tod, mask)
		# Any nonzero samples should be cut
		tod = tod != 0
		cut = sampcut.from_mask(tod)
		del tod
	# Progress is this rank's position within its contiguous share of the
	# ntod work items, as a percentage.
	progress = 100.0*(ind-comm.rank*ntod//comm.size)/((comm.rank+1)*ntod//comm.size-comm.rank*ntod//comm.size)
	# Report: rank, progress%, tod id, cut fraction, visibility flag,
	# current and peak memory in GiB.
	print("%3d %5.1f %s %6.4f %d %8.3f %8.3f" % (comm.rank, progress, id, float(cut.sum())/cut.size, visible, memory.current()/1024.**3, memory.max()/1024.**3))
	mystats.append([ind, float(cut.sum())/cut.size, visible])
	# Add to my work file
	_, uids = actdata.split_detname(d.dets)
	flags = flagrange.from_sampcut(cut, dets=uids)
	flags.write(hfile, group=id)
# Merge all the individual cut files into a single big one.
# Rank 0 copies every group from each rank's work_NNN.hdf into cuts.hdf.
comm.Barrier()
if comm.rank == 0:
	with h5py.File(args.odir + "/cuts.hdf", "w") as ofile:
		for i in range(comm.size):
			print("Reducing %3d" % i)
			with h5py.File(args.odir + "/work_%03d.hdf" % i, "r") as ifile:
				for key in sorted(ifile.keys()):
					ifile.copy(key, ofile)