def retile(ipathfmt, opathfmt, itile1=(None,None), itile2=(None,None),
        otileoff=(0,0), otilenum=(None,None), ocorner=(-np.pi/2,-np.pi),
        otilesize=(675,675), comm=None, verbose=False, slice=None):
    """Given a set of tiles on disk with locations ipathfmt % {"y":...,"x":...},
    retile them into a new tiling and write the result to
    opathfmt % {"y":...,"x":...}. The new tiling will have the tile size given
    by otilesize[2]. Negative size means the tiling will go down/left instead
    of up/right. The corner of the tiling will be at sky coordinates ocorner[2]
    in radians. The new tiling will be pixel-compatible with the input tiling
    - e.g. the wcs will only differ by crpix.

    The output tiling will logically cover the whole sky, but only output
    tiles that overlap with input tiles will actually be written. This can be
    modified by using otileoff[2] and otilenum[2]. otileoff gives the tile
    indices of the corner tile, while otilenum indicates the number of tiles
    to write."""
    # Set up mpi
    rank, size = (comm.rank, comm.size) if comm is not None else (0, 1)
    # Expand any scalars
    otilesize = np.zeros(2,int)+otilesize
    otileoff  = np.zeros(2,int)+otileoff
    # Find the range of input tiles
    itile1, itile2 = find_tile_range(ipathfmt, itile1, itile2)
    # To fill in the rest of the information we need to know more
    # about the input tiling, so read the first tile
    ibase = enmap.read_map(ipathfmt % {"y":itile1[0],"x":itile1[1]})
    if slice: ibase = eval("ibase"+slice)  # slice is a string like "[0]" applied to each tile
    itilesize = ibase.shape[-2:]
    # Find the pixel position of our output corners according to the wcs.
    # This is the last place we need to do a coordinate transformation.
    # All the rest can be done in pure pixel logic.
    pixoff = np.round(ibase.sky2pix(ocorner)).astype(int)
    # Find the range of output tiles
    def pix2otile(pix, ioff, osize): return (pix-ioff)//osize
    otile1 = pix2otile(itile1*itilesize,   pixoff, otilesize)
    otile2 = pix2otile(itile2*itilesize-1, pixoff, otilesize)
    otile1, otile2 = np.minimum(otile1,otile2), np.maximum(otile1,otile2)
    otile2 += 1
    # We can now loop over output tiles
    cache = [None,None,None]
    oyx = [(oy,ox) for oy in range(otile1[0],otile2[0]) for ox in range(otile1[1],otile2[1])]
    for i in range(rank, len(oyx), size):
        otile = np.array(oyx[i])
        # Find out which input tiles overlap with this output tile.
        # Our tile stretches from opix1:opix2 relative to the global input pixels
        opix1 = otile*otilesize + pixoff
        opix2 = (otile+1)*otilesize + pixoff
        # Output tiles and input tiles may increase in opposite directions
        opix1, opix2 = np.minimum(opix1,opix2), np.maximum(opix1,opix2)
        try:
            omap = read_area(ipathfmt, [opix1,opix2], itile1=itile1,
                    itile2=itile2, cache=cache, slice=slice)
        except IOError:
            continue
        oname = opathfmt % {"y":otile[0]+otileoff[0],"x":otile[1]+otileoff[1]}
        utils.mkdir(os.path.dirname(oname))
        enmap.write_map(oname, omap)
        if verbose: print(oname)
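# A minimal usage sketch for retile. The path formats and tile size below
# are hypothetical placeholders, not files that ship with this module.
def _example_retile(comm=None):
    # Repackage an existing tiling into 1350x1350 tiles anchored at the
    # south-west corner of the sky; only tiles overlapping input are written.
    retile("itiles/tile%(y)03d_%(x)03d.fits",
           "otiles/tile%(y)03d_%(x)03d.fits",
           otilesize=(1350,1350), ocorner=(-np.pi/2,-np.pi),
           comm=comm, verbose=True)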
def write_map(name, map, ext="fits", merged=True):
    if not merged:
        # Write as individual tiles in a directory with the given name
        utils.mkdir(name)
        for pos, tile in zip(map.loc_pos, map.tiles):
            enmap.write_map(name + "/tile%03d_%03d.%s" % (tuple(pos)+(ext,)), tile)
    else:
        # Write to a single file. This currently creates the full map
        # in memory while writing. It is unclear how to avoid this
        # without bypassing pyfits or becoming super-slow.
        if map.comm.rank == 0:
            canvas = enmap.zeros(map.shape, map.wcs, map.dtype)
        else:
            canvas = None
        dmap2enmap(map, canvas)
        if map.comm.rank == 0:
            enmap.write_map(name, canvas)
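# Hedged usage sketch: write a distributed map (any object exposing the
# tiles/loc_pos/comm interface used above) either merged or as tiles.
# The output names are placeholders.
def _example_write_map(dmap):
    write_map("map_full.fits", dmap)            # one merged file, gathered on rank 0
    write_map("map_tiles", dmap, merged=False)  # directory of tile%03d_%03d.fits files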
parser.add_argument("-v", "--verbose", action="store_true") parser.add_argument("-W", "--wiener", action="store_true") parser.add_argument("-m", "--mask", type=str, default=None) parser.add_argument("--filter-mode", type=str, default="weight") parser.add_argument("--cg-tol", type=float, default=1e-4) parser.add_argument("--detrend", type=int, default=1) args = parser.parse_args() config = jointmap.read_config(args.config) mapinfo = jointmap.Mapset(config, args.sel) tsize = args.tsize # pixels pad = args.pad # pixels dtype = np.float64 ncomp = args.ncomp comm = mpi.COMM_WORLD utils.mkdir(args.odir) # Get the set of bounding boxes, after normalizing them boxes = np.sort(np.array([d.box for d in mapinfo.datasets]), -2) # Read the cmb power spectrum, which is an effective noise # component. T-only #cl_path = os.path.join(os.path.dirname(args.config),config.cl_background) #cl_bg = powspec.read_spectrum(cl_path)[0,0] def overlaps_any(box, refboxes): rdec, rra = utils.moveaxis(refboxes - box[0, :], 2, 0) wdec, wra = np.abs(box[1] - box[0]) rra -= np.floor(rra[:, 0, None] / (2 * np.pi) + 0.5) * (2 * np.pi) for i in range(-1, 2):
parser.add_argument("-p", "--pad", type=int, default=240) parser.add_argument("-C", "--ncomp", type=int, default=3) parser.add_argument("-B", "--obeam", type=str, default=None) parser.add_argument("-c", "--cont", action="store_true") parser.add_argument("-v", "--verbose", action="store_true") parser.add_argument("--filter-mode", type=str, default="weight") args = parser.parse_args() config = jointmap.read_config(args.config) mapinfo = jointmap.Mapset(config, args.sel) tsize = args.tsize # pixels pad = args.pad # pixels dtype = np.float64 ncomp = args.ncomp comm = mpi.COMM_WORLD utils.mkdir(args.odir) # Get the set of bounding boxes, after normalizing them boxes = np.sort(np.array([d.box for d in mapinfo.datasets]),-2) # Read the cmb power spectrum, which is an effective noise # component. T-only #cl_path = os.path.join(os.path.dirname(args.config),config.cl_background) #cl_bg = powspec.read_spectrum(cl_path)[0,0] def overlaps_any(box, refboxes): rdec, rra = utils.moveaxis(refboxes - box[0,:], 2,0) wdec, wra = np.abs(box[1] - box[0]) rra -= np.floor(rra[:,0,None]/(2*np.pi)+0.5)*(2*np.pi) for i in range(-1,2): nra = rra + i*(2*np.pi)
comm  = mpi.COMM_WORLD
myid  = comm.rank
nproc = comm.size

filedb.init()
db = filedb.data
filelist = todinfo.get_tods(args.filelist, filedb.scans)

def compress_beam(sigma, phi):
    # Pack an elliptical gaussian beam with per-axis widths sigma and
    # orientation phi into the three independent components of its
    # inverse covariance matrix: [C00, C11, C01].
    c, s = np.cos(phi), np.sin(phi)
    R = np.array([[c,-s],[s,c]])
    C = np.diag(sigma**-2)
    C = R.dot(C).dot(R.T)
    return np.array([C[0,0], C[1,1], C[0,1]])

utils.mkdir(args.odir)
# Dump our settings
if myid == 0:
    config.save(args.odir + "/config.txt")
    with open(args.odir + "/args.txt", "w") as f:
        f.write(" ".join([pipes.quote(a) for a in sys.argv[1:]]) + "\n")
    with open(args.odir + "/env.txt", "w") as f:
        for k, v in os.environ.items():
            f.write("%s: %s\n" % (k, v))
    with open(args.odir + "/ids.txt", "w") as f:
        for id in filelist:
            f.write("%s\n" % id)
    shutil.copyfile(filedb.cjoin(["root","dataset","filedb"]), args.odir + "/filedb.txt")
    try:
        shutil.copyfile(filedb.cjoin(["root","dataset","todinfo"]), args.odir + "/todinfo.txt")
    except (IOError, OSError):
        pass
# Set up logging
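# Hedged sketch of compress_beam usage. The widths below are hypothetical;
# utils.arcmin, utils.degree and utils.fwhm are assumed to be the unit
# constants from the utils module imported by this script.
def _example_compress_beam():
    sigma = np.array([1.4, 1.0]) * utils.arcmin / utils.fwhm  # per-axis sigmas
    return compress_beam(sigma, 30*utils.degree)  # -> [C00, C11, C01]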
ids = ids[inds]
ifiles = [ifiles[ind] for ind in inds]
# Look for duplicates
dups = np.where(ids[1:] == ids[:-1])[0]
if len(dups) > 0:
    # Report the first example. dups holds the index of the first member of
    # each duplicate pair, so the pair is (dups[0], dups[0]+1).
    if comm.rank == 0:
        print("Duplicate ids in input: " + ", ".join([ifiles[dups[0]], ifiles[dups[0]+1]]))
    comm.Finalize()
    sys.exit(1)
nfile = len(ifiles)
# Make tmp directory for output
tmpdir = args.ofile + ".tmp"
tmpfmt = tmpdir + "/cut%03d.hdf"
utils.mkdir(tmpdir)
# Process each of our blocks
with h5py.File(tmpfmt % comm.rank, "w") as hfile:
    for j, i in enumerate(range(comm.rank*nfile//comm.size, (comm.rank+1)*nfile//comm.size)):
        ifile, id = ifiles[i], ids[i]
        progress = min(comm.rank + j*comm.size, nfile-1)
        print("%5d/%d %5.1f%% %s" % (progress+1, nfile, 100.0*(progress+1)/nfile, id))
        dets, gain = files.read_gain(ifile)
        dtype = [("det_uid","i"),("cal","f")]
        res = np.zeros(len(dets), dtype)
        res["det_uid"] = dets
        res["cal"] = gain
        hfile[id] = res
# Then concatenate them into the final file
def combine_tiles(ipathfmt, opathfmt, combine=2, downsample=2,
        itile1=(None,None), itile2=(None,None), tyflip=False, txflip=False,
        pad_to=None, comm=None, verbose=False):
    """Given a set of tiles on disk at location ipathfmt % {"y":...,"x":...},
    combine them into larger tiles, downsample and write the result to
    opathfmt % {"y":...,"x":...}. x and y must be contiguous and start at 0.

    itile1[2] indicates the tile coordinates of the first valid input tile.
    This needs to be specified if not all tiles of the logical tiling are
    physically present. tyflip and txflip indicate whether the tile
    coordinate system is reversed relative to the pixel coordinates or not."""
    # Expand combine and downsample to 2d
    combine    = np.zeros(2, int) + combine
    downsample = np.zeros(2, int) + downsample
    if pad_to is not None:
        pad_to = np.zeros(2, int) + pad_to
    # Handle optional mpi
    rank, size = (comm.rank, comm.size) if comm is not None else (0, 1)
    # Find the range of input tiles
    itile1, itile2 = find_tile_range(ipathfmt, itile1, itile2)
    # Read the first tile to get its size information
    ibase = enmap.read_map(ipathfmt % {"y": itile1[0], "x": itile1[1]}) * 0
    # Find the set of output tiles we need to consider
    otile1 = itile1 // combine
    otile2 = (itile2 - 1) // combine + 1
    # And loop over them
    oyx = [(oy, ox) for oy in range(otile1[0], otile2[0]) for ox in range(otile1[1], otile2[1])]
    for i in range(rank, len(oyx), size):
        oy, ox = oyx[i]
        # Read in all associated tiles into a list of lists
        rows = []
        for dy in range(combine[0]):
            iy = oy * combine[0] + dy
            if iy >= itile2[0]: continue
            cols = []
            for dx in range(combine[1]):
                ix = ox * combine[1] + dx
                if ix >= itile2[1]: continue
                if iy < itile1[0] or ix < itile1[1]:
                    # The first tiles are missing on disk, but are
                    # logically a part of the tiling. Use ibase,
                    # which has been zeroed out.
                    cols.append(ibase)
                else:
                    itname = ipathfmt % {"y": iy, "x": ix}
                    cols.append(enmap.read_map(itname))
            if txflip: cols = cols[::-1]
            rows.append(cols)
        # Stack them next to each other into a big tile
        if tyflip: rows = rows[::-1]
        omap = enmap.tile_maps(rows)
        # Downgrade if necessary
        if np.any(downsample > 1):
            omap = enmap.downgrade(omap, downsample)
        if pad_to is not None:
            # Padding happens towards the end of the tiling,
            # which depends on the flip status
            padding = np.array([[0, 0], [pad_to[0] - omap.shape[-2], pad_to[1] - omap.shape[-1]]])
            if tyflip: padding[:, 0] = padding[::-1, 0]
            if txflip: padding[:, 1] = padding[::-1, 1]
            omap = enmap.pad(omap, padding)
        # And output
        otname = opathfmt % {"y": oy, "x": ox}
        utils.mkdir(os.path.dirname(otname))
        enmap.write_map(otname, omap)
        if verbose: print(otname)
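# Minimal usage sketch (hypothetical paths): merge 2x2 blocks of input tiles
# and downsample by 2, as one would when building the next zoom level of a
# tiled map viewer.
def _example_combine_tiles(comm=None):
    combine_tiles("zoom1/tile%(y)03d_%(x)03d.fits",
                  "zoom0/tile%(y)03d_%(x)03d.fits",
                  combine=2, downsample=2, comm=comm, verbose=True)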
parser.add_argument("-c", "--cont", action="store_true") parser.add_argument("-P", "--npass", type=int, default=3) parser.add_argument("--output-full-model", action="store_true") args = parser.parse_args() config = jointmap.read_config(args.config) mapinfo = jointmap.Mapset(config, args.sel) tsize = args.tsize # pixels pad = args.pad # pixels dtype = np.float64 ncomp = 1 comm = mpi.COMM_WORLD signals = args.signals.split(",") verbosity = args.verbose - args.quiet highpass_alpha = 3 utils.mkdir(args.odir) debug_tile = None if args.debug_tile is None else [int(w) for w in args.debug_tile.split(",")] # Get the set of bounding boxes, after normalizing them boxes = np.sort(np.array([d.box for d in mapinfo.datasets]),-2) # Read the cmb power spectrum, which is an effective noise # component. T-only cl_path = os.path.join(os.path.dirname(args.config),config.cl_background) cl_bg = powspec.read_spectrum(cl_path)[0,0] # Read our mask. High-resolution and per-dataset masks would be handled otherwise. # This approach only works for low-resolution masks if config.mask_mode == "none": mask = None elif config.mask_mode == "lowres":
if args.verbose: print("Writing %s" % ohit) enmap.write_map(ohit, w) # Two cases: Normal enmaps or dmaps if not os.path.isdir(imaps[0]): # Normal monotlithic map coadd_maps(imaps, ihits, args.omap, args.ohit, cont=args.cont, ncomp=args.ncomp) else: # Dmap. Each name is actually a directory, but they # all have compatible tile names. tilenames = get_tilenames(imaps[0]) utils.mkdir(args.omap) utils.mkdir(args.ohit) for tilename in tilenames[comm.rank::comm.size]: timaps = ["%s/%s" % (imap, tilename) for imap in imaps] tihits = ["%s/%s" % (ihit, tilename) for ihit in ihits] print("%3d %s" % (comm.rank, tilename)) coadd_maps(timaps, tihits, args.omap + "/" + tilename, args.ohit + "/" + tilename, cont=args.cont, ncomp=args.ncomp) if args.verbose: print("Done")
    res = a.copy()
    res[np.isnan(res)] = 0
    return res

def add_maps(imaps, omap):
    if args.verbose: print("Reading %s" % imaps[0])
    m = nonan(enmap.read_map(imaps[0]))*scales[0]
    for scale, mif in zip(scales[1:], imaps[1:]):
        if args.verbose: print("Reading %s" % mif)
        m += nonan(enmap.read_map(mif))*scale
    if args.mean: m /= len(imaps)
    if args.verbose: print("Writing %s" % omap)
    enmap.write_map(omap, m)

def get_tilenames(dir):
    return sorted([name for name in os.listdir(dir) if name.endswith(".fits") or name.endswith(".hdf")])

# Two cases: Normal enmaps or dmaps
if not os.path.isdir(args.imaps[0]):
    # Normal monolithic map
    if comm.rank == 0:
        add_maps(args.imaps, args.omap)
else:
    # Dmap. Each name is actually a directory, but they
    # all have compatible tile names.
    tilenames = get_tilenames(args.imaps[0])
    utils.mkdir(args.omap)
    for tilename in tilenames[comm.rank::comm.size]:
        timaps = ["%s/%s" % (imap, tilename) for imap in args.imaps]
        print("%3d %s" % (comm.rank, tilename))
        add_maps(timaps, args.omap + "/" + tilename)
if args.verbose: print("Done")
parser.add_argument("ofile") parser.add_argument("-d", "--dets", type=str, default=None) parser.add_argument("-D", "--absdets", type=str, default=None) parser.add_argument("-c", "--calib", action="store_true") parser.add_argument("-C", "--manual-calib", type=str, default=None) parser.add_argument("--bin", type=int, default=1) parser.add_argument("--nofft", action="store_true") parser.add_argument("--nodeslope", action="store_true") parser.add_argument("-F", "--fields", type=str, default=None) args = parser.parse_args() filedb.init() ids = filedb.scans[args.query] if len(ids) > 1: # Will process multiple files utils.mkdir(args.ofile) for id in ids: print id entry = filedb.data[id] subdets = None absdets = None if args.absdets is not None: absdets = [int(w) for w in args.absdets.split(",")] elif args.dets is not None: subdets = [int(w) for w in args.dets.split(",")] fields = ["gain","tconst","cut","tod","boresight"] if args.fields: fields = args.fields.split(",") d = actdata.read(entry, fields=fields) if absdets: d.restrict(dets=absdets) if subdets: d.restrict(dets=d.dets[subdets])
    # Planck's law: spectral radiance of a blackbody at temperature T [K]
    # and frequency nu [Hz], in SI units (W/m^2/Hz/sr)
    return 2 * h * nu**3 / c**2 / (np.exp(h * nu / k / T) - 1)

def uK2mJ(amp, b1, b2):
    # Convert a peak amplitude in uK CMB to flux in mJy for a gaussian beam
    # with widths b1, b2 [rad]: linearize B around T0 = 2.73 K, multiply by
    # the beam solid angle 2*pi*b1*b2, and divide by 1e-29 W/m^2/Hz = 1 mJy.
    T0 = 2.73
    nu = 148e9
    dB = B(T0 + amp * 1e-6, nu) - B(T0, nu)
    return dB * 2 * np.pi * b1 * b2 / 1e-29

def output_dummy(id):
    with open(args.odir + "/samps%03d.txt" % id, "w") as ofile:
        pass

utils.mkdir(args.odir)

if args.nmax > 0: groups = groups[:args.nmax]

for i in range(myid, len(groups), nproc):
    if i < args.i: continue
    group = groups[i]
    if args.cont:
        # If all our members are done, skip to next group
        try:
            lens = [len(np.loadtxt(args.odir + "/samps%03d.txt" % j)) for j in group]
            if np.min(lens) >= args.nsamp: continue
res["amplim"] = config.get("src_handling_lim") res["srcs"] = None srcfile = config.get("src_handling_list") if srcfile: res["srcs"] = pointsrcs.read(srcfile) return res src_handling = parse_src_handling() filedb.init() db = filedb.data filelist = todinfo.get_tods(args.filelist, filedb.scans) if args.group_tods: filelist = scanutils.get_tod_groups(filelist) utils.mkdir(args.odir) root = args.odir + "/" + (args.prefix + "_" if args.prefix else "") # Dump our settings if comm.rank == 0: config.save(root + "config.txt") with open(root + "args.txt","w") as f: argstring = " ".join([pipes.quote(a) for a in sys.argv[1:]]) f.write(argstring + "\n") print argstring with open(root + "env.txt","w") as f: for k,v in os.environ.items(): f.write("%s: %s\n" %(k,v)) with open(root + "ids.txt","w") as f: for id in filelist: f.write("%s\n" % str(id))
parser.add_argument("-S", "--corr-spacing", type=float, default=2) parser.add_argument("--srcsub", type=int, default=1) parser.add_argument("-M", "--mapsub", type=str, default=None) parser.add_argument("-I", "--inject", type=str, default=None) parser.add_argument("--only", type=str) parser.add_argument("--static", action="store_true") parser.add_argument("-c", "--cont", action="store_true") parser.add_argument("-D", "--dayerr", type=str, default="-1:1,-2:4") parser.add_argument("--srclim-day", type=float, default=50) # These should ideally be moved into the general tod autocuts parser.add_argument("-a", "--asteroid-file", type=str, default=None) parser.add_argument("--asteroid-list", type=str, default=None) args = parser.parse_args() comm = mpi.COMM_WORLD utils.mkdir(args.odir) shape, wcs = enmap.read_map_geometry(args.area) wshape = (3, ) + shape[-2:] dtype = np.float32 if config.get("map_bits") == 32 else np.float64 root = args.odir + "/" + (args.prefix + "_" if args.prefix else "") sys = config.get("map_sys") ym = utils.arcmin / utils.yr2days # Bias source amplitudes 0.1% towards their fiducial value amp_prior = 1e-3 dayerr = np.array([[float(w) for w in tok.split(":")] for tok in args.dayerr.split(",") ]).T # [[x1,y1],[x2,y2]] only = [int(word) for word in args.only.split(",")] if args.only else []
parser.add_argument("--demode", action="store_true") parser.add_argument("--decommon", action="store_true") parser.add_argument("-c", "--cont", action="store_true") args = parser.parse_args() filedb.init() comm_world = mpi.COMM_WORLD comm_group = comm_world.Split(comm_world.rank%args.nsub, comm_world.rank/args.nsub) comm_sub = comm_world.Split(comm_world.rank/args.nsub, comm_world.rank%args.nsub) ids = todinfo.get_tods(args.sel, filedb.scans) tol = args.tol*utils.arcmin daz = args.daz*utils.arcmin dtype = np.float32 if config.get("map_bits") == 32 else np.float64 tods_per_map = args.group utils.mkdir(args.odir) root = args.odir + "/" + (args.prefix + "_" if args.prefix else "") # Set up logging utils.mkdir(root + "log") logfile = root + "log/log%03d.txt" % comm_world.rank log_level = log.verbosity2level(config.get("verbosity")) L = log.init(level=log_level, file=logfile, rank=comm_world.rank, shared=False) # Run through all tods to determine the scanning patterns L.info("Detecting scanning patterns") boxes = np.zeros([len(ids),2,2]) for ind in range(comm_world.rank, len(ids), comm_world.size): id = ids[ind] entry = filedb.data[id] try: d = actdata.read(entry, ["boresight","tconst","cut","cut_noiseest"])
def output(res, dir): utils.mkdir(dir) for i, (tqu, teb, desc) in enumerate(zip(res.tqu, res.teb, res.desc)): enmap.write_map("%s/%02d_%s_tqu.hdf" % (dir,i+1,desc), tqu) enmap.write_map("%s/%02d_%s_teb.hdf" % (dir,i+1,desc), teb)
def mkoutdir(outdir):
    # Only rank 0 creates the directory. Other ranks should synchronize
    # (e.g. with a barrier) before writing into it, since there is no
    # guarantee it exists yet when they get here.
    if comm.Get_rank() == 0:
        utils.mkdir(outdir)
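# Hedged sketch of the race-free variant hinted at above: pair the rank-0
# mkdir with a barrier so no rank proceeds until the directory exists.
def mkoutdir_synced(outdir):
    if comm.Get_rank() == 0:
        utils.mkdir(outdir)
    comm.Barrier()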
parser.add_argument("--ndet", type=int, default=0) args = parser.parse_args() precon = config.get("map_precon") dtype = np.float32 if config.get("map_bits") == 32 else np.float64 comm = mpi4py.MPI.COMM_WORLD myid = comm.rank nproc = comm.size nmax = config.get("map_cg_nmax") db = filedb.ACTFiles(config.get("filedb")) filelist = todinfo.get_tods(args.filelist, config.get("todinfo")) area = enmap.read_map(args.area) area = enmap.zeros((args.ncomp, ) + area.shape[-2:], area.wcs, dtype) utils.mkdir(args.odir) root = args.odir + "/" + (args.prefix + "_" if args.prefix else "") # Dump our settings if myid == 0: config.save(root + "config.txt") with open(root + "args.txt", "w") as f: f.write(" ".join([pipes.quote(a) for a in sys.argv[1:]]) + "\n") with open(root + "env.txt", "w") as f: for k, v in os.environ.items(): f.write("%s: %s\n" % (k, v)) with open(root + "ids.txt", "w") as f: for id in filelist: f.write("%s\n" % id) shutil.copyfile(config.get("filedb"), root + "filedb.txt") # Set up logging
def build_single(ifile, srcs, beam, ofile, mask_level=0, apod_size=16):
    imap = enmap.read_map(ifile)
    omap, oslice = pointsrcs.sim_srcs(imap.shape[-2:], imap.wcs, srcs, beam, return_padded=True)
    if mask_level:
        mask = omap > mask_level
        # Apodize the mask edge with a cosine taper over apod_size pixels
        omap = 1 - np.cos(np.minimum(1, ndimage.distance_transform_edt(1 - mask) / float(apod_size)) * np.pi)
        omap = enmap.samewcs(omap, imap)
    omap = omap[oslice]
    enmap.write_map(ofile, omap)

if os.path.isdir(args.ifile):
    utils.mkdir(args.ofile)
    ifiles = sorted(glob.glob(args.ifile + "/tile*.fits"))[::-1]
    for ind in range(comm.rank, len(ifiles), comm.size):
        ifile = ifiles[ind]
        print(ifile)
        ofile = args.ofile + "/" + os.path.basename(ifile)
        build_single(ifile, srcs, beam, ofile, args.mask, args.apod)
else:
    print(args.ifile)
    build_single(args.ifile, srcs, beam, args.ofile, args.mask, args.apod)
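# Hedged usage sketch: srcs is the source list this script already loads,
# and the beam is given as a gaussian sigma in radians (one of the forms
# sim_srcs accepts). File names and the mask threshold are placeholders.
def _example_build_single(srcs):
    sigma = 1.4*utils.arcmin/utils.fwhm  # hypothetical 1.4 arcmin FWHM beam
    build_single("tile000.fits", srcs, sigma, "mask000.fits",
                 mask_level=1e-3, apod_size=16)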
comm  = mpi.COMM_WORLD
myid  = comm.rank
nproc = comm.size

filedb.init()
db = filedb.data
filelist = todinfo.get_tods(args.filelist, filedb.scans)

def compress_beam(sigma, phi):
    c, s = np.cos(phi), np.sin(phi)
    R = np.array([[c,-s],[s,c]])
    C = np.diag(sigma**-2)
    C = R.dot(C).dot(R.T)
    return np.array([C[0,0], C[1,1], C[0,1]])

utils.mkdir(args.odir)
# Dump our settings
if myid == 0:
    config.save(args.odir + "/config.txt")
    with open(args.odir + "/args.txt", "w") as f:
        f.write(" ".join([pipes.quote(a) for a in sys.argv[1:]]) + "\n")
    with open(args.odir + "/env.txt", "w") as f:
        for k, v in os.environ.items():
            f.write("%s: %s\n" % (k, v))
    with open(args.odir + "/ids.txt", "w") as f:
        for id in filelist:
            f.write("%s\n" % id)
    shutil.copyfile(filedb.cjoin(["root","dataset","filedb"]), args.odir + "/filedb.txt")
    try:
        shutil.copyfile(filedb.cjoin(["root","dataset","todinfo"]), args.odir + "/todinfo.txt")
    except IOError:
        pass
# Set up logging
parser.add_argument("--max-sens", type=float, default=20,
        help="Reject detectors more than this many times more sensitive than "
        "the median at any of the indicated frequencies. Set to 0 to disable.")
parser.add_argument("--full-stats", action="store_true")
args = parser.parse_args()

comm = mpi.COMM_WORLD
srate = 400.
fmax = srate / 2
ndet = 32 * 33  # maximum detectors per array (32x33 grid)
utils.mkdir(args.odir)

# Parse the frequency bins: each "center:width" token becomes [f-w/2, f+w/2]
tmp  = [[float(tok) for tok in word.split(":")] for word in args.f.split(",")]
bins = np.array([[t[0] - t[1]/2, t[0] + t[1]/2] for t in tmp])
rate = [float(w) for w in args.R.split(":")]

filedb.init()
ids = filedb.scans[args.sel]
ntod = len(ids)

cuts = np.zeros([ntod, ndet], dtype=np.uint8)
stats = None
if args.full_stats:
    stats = np.zeros([ntod, ndet, 4])
for si in range(comm.rank, ntod, comm.size):
    try:
        id = ids[si]
    srcdata = np.array([ra*u.degree, dec*u.degree, amp, omg, phi, D])
    # If only one source is specified, make sure it has a shape
    # compatible with multiple sources
    if len(srcdata.shape) == 1:
        srcdata = srcdata[:, None]
    return srcdata

filedb.init()
db = filedb.scans.select(args.sel)
ids = db.ids
sys = args.sys
dtype = np.float32
verbose = args.verbose - args.quiet
down = config.get("downsample")
poly_pad = 3*u.degree
bounds = db.data["bounds"]
u.mkdir(args.odir)

# Load source information
srcdata = read_srcs(args.catalog)
srcpos, amps, omg, phi, D = srcdata[:2], srcdata[2], srcdata[3], srcdata[4], srcdata[5]
if len(srcpos.shape) == 1:
    srcpos = srcpos[:, None]

# Which sources pass our requirements?
base_sids = set(range(amps.size))
if args.minamp is not None:
    base_sids &= set(np.where(amps > args.minamp)[0])
if args.srcs is not None:
    selected = [int(w) for w in args.srcs.split(",")]
    base_sids &= set(selected)
base_sids = list(base_sids)

for ind in range(len(ids)):