def load_pha(self, id, arg=None, use_errors=False):
    """Load multiple PHA data sets.

    This extends ``sherpa.astro.ui.load_pha`` to load multiple
    data sets with one call. The usual ``filename`` argument can be
    a stack file with multiple data files defined in it. In this
    case, the load function is called once for each data set in the
    stack file.
    """
    if arg is None:
        id, arg = arg, id

    if id is not None:
        if self._default_instance:
            ui.load_pha(id, arg, use_errors)
            return
        else:
            raise AttributeError(load_error_msg(id))

    # File stacks: if the file argument is a stack file, expand the
    # file and call this function for each file in the stack.
    try:
        files = stk.build(arg)
        for file in files:
            self._load_func(ui.load_pha, file, use_errors)
    except Exception:
        # Not a stack (or it could not be expanded), so treat the
        # argument as a single file.
        self._load_func(ui.load_pha, arg, use_errors)
def load_pha(self, id, arg=None, use_errors=False):
    """Load multiple PHA data sets.

    This extends ``sherpa.astro.ui.load_pha`` to load multiple
    data sets with one call. The usual ``filename`` argument can be
    a stack file with multiple data files defined in it. In this
    case, the load function is called once for each data set in the
    stack file.
    """
    if arg is None:
        id, arg = arg, id

    if id is not None:
        if self._default_instance:
            ui.load_pha(id, arg, use_errors)
            return
        else:
            raise AttributeError(load_error_msg(id))

    # File stacks: if the file argument is a stack file, expand the
    # file and call this function for each file in the stack.
    try:
        for infile in stk.build(arg):
            self._load_func(ui.load_pha, infile, use_errors)
    except (NameError, OSError, IOErr):
        self._load_func(ui.load_pha, arg, use_errors)
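Both load_pha variants above rely on stk.build raising an error when its argument cannot be expanded as a stack, falling back to a single-file load. A minimal sketch of that pattern outside the datastack machinery (the file name and loader are hypothetical):

import stk

def load_each(arg, loader):
    """Apply loader to each stack element, or to arg itself if it
    cannot be expanded as a stack."""
    try:
        files = stk.build(arg)
    except Exception:
        files = [arg]

    for infile in files:
        loader(infile)

# "@pha.lis" expands to its entries; a plain "src.pha" loads as-is.
load_each("@pha.lis", print)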
def test_build_stack(self):
    def get_name(name):
        return '/'.join((_this_dir, name))

    out = stk.build('@+{}/{}'.format(_this_dir, 'a.lis'))
    self.assertEqual([get_name('a'), get_name('a1'), get_name('a2'),
                      get_name('b'), get_name('b1'), get_name('b2')],
                     out)
def parse_refpos(refpos):
    """Parse a user-supplied reference position.

    The argument is a string that can be empty, a file name, or a
    pair of ra/dec values. Returns the tuple (ra, dec, filename),
    where each element is None if not relevant, otherwise a number
    or a string. If no value is given then None is returned.
    """
    if refpos.strip() == "":
        return None
    elif os.path.isfile(refpos):
        v3("Reference position is a file.")
        return (None, None, refpos)

    v3("Extracting reference position from " + refpos)

    # Assume that no spaces are used to separate out sexagesimal
    # formats, so that we can treat the argument as a stack.
    #
    coord = stk.build(refpos)
    if len(coord) != 2:
        raise ValueError(
            "Unable to parse {} as a ra and dec value.".format(refpos))

    ra = coords.format.ra2deg(coord[0])
    dec = coords.format.dec2deg(coord[1])
    return (ra, dec, None)
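A hedged illustration of the three input shapes parse_refpos handles (the file name is made up, and the numeric results assume the coords.format converters behave as their names suggest):

parse_refpos("")                     # -> None
parse_refpos("refpos.fits")          # -> (None, None, "refpos.fits"), if the file exists
parse_refpos("123.45,-0.5")          # -> (123.45, -0.5, None)
parse_refpos("12:03:45,-00:30:00")   # sexagesimal pair, treated as a two-element stack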
def _get_fovs(self, infile):
    """Load a stack of FOV files."""
    import stk

    fovs = stk.build(infile)
    if len(fovs) == 0:
        raise IOError("No fov files found in " + infile)

    self.fovs = {}
    for ff in fovs:
        # Just use the file name. A previous version tried to use
        # obsid/obi, but the file name is the most generic key.
        oo = ff
        if oo in self.fovs:
            # This may happen for interleaved-mode datasets (e1, e2)
            # or for other reasons. No reason to error out; just
            # skip the file and continue.
            verb1("Multiple files for obsid {}. Skipping file {}".format(
                oo, ff))
            continue

        # By selecting the ra,dec columns we get the region in
        # degrees, not pixels, so we can ask whether a point in
        # celestial coordinates is inside the region or not.
        rr = regParse("region({}[cols ra,dec,shape,component])".format(ff))

        # Store the region object.
        self.fovs[oo] = rr
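With the regions stored in celestial coordinates, a containment test could look like the following sketch. It assumes the regInsideRegion call from the same CIAO region module that provides regParse; the file name and coordinates are made up:

from region import regParse, regInsideRegion

# Read the FOV region in ra,dec rather than pixel coordinates.
rr = regParse("region(fov1.fits[cols ra,dec,shape,component])")
if regInsideRegion(rr, 123.45, -0.5):
    print("The position falls inside this FOV")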
def find_files(fnames, absolute=True, cwd='.', keepnone=True):
    """Return an array of file names giving the absolute
    (absolute=True) or relative path to each element of fnames, if
    they are all found; otherwise None is returned.

    fnames is a string and is treated as a stack.

    cwd gives the directory to use as the base when calculating
    relative paths (so it is only used if absolute=False).

    If keepnone is True then a value of NONE is retained, otherwise
    the return value is None rather than an array. This lets you
    distinguish a file value given as NONE from a file that was not
    found.
    """
    out = []
    v4("Calling find_files on " + fnames)
    for fname in stk.build(fnames):
        if fname == 'NONE':
            if keepnone:
                out.append(fname)
                continue
            else:
                return None

        path = find_file(fname, absolute=absolute, cwd=cwd)
        if path is None:
            return None

        out.append(path)

    return out
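A hedged usage sketch (find_file and the v4 logger come from the surrounding module; the file names are made up):

# Every entry must resolve for a list to be returned; 'NONE' entries
# are kept when keepnone=True.
paths = find_files("evt2.fits,NONE,asol1.fits", absolute=True)
# -> ['/abs/path/evt2.fits', 'NONE', '/abs/path/asol1.fits'],
#    or None if any file is missing.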
def _load(self, *args, **kwargs):
    # ``load_func`` is not defined here; it comes from the enclosing
    # scope that creates this method.
    if len(args) == 1:
        id, arg = None, args[0]
        args = []
    elif len(args) > 1:
        id, arg = args[0], args[1]
        args = args[2:]

    if id is not None:
        if self is DATASTACK:
            self._load_func(load_func, id, arg, *args, **kwargs)
            return
        else:
            raise AttributeError(
                "When called from a datastack instance, an ID cannot "
                "be provided to a load function ({})".format(id))

    # File stacks: if the file argument is a stack file, expand the
    # file and call this function for each file in the stack.
    try:
        files = stk.build(arg)
        for file in files:
            self._load_func(load_func, file, *args, **kwargs)
    except Exception:
        self._load_func(load_func, arg, *args, **kwargs)
def test_build_stack(self):
    import stk

    def get_name(name):
        return '/'.join((_this_dir, name))

    out = stk.build('@+{}/{}'.format(_this_dir, 'a.lis'))
    self.assertEqual([get_name('a'), get_name('a1'), get_name('a2'),
                      get_name('b'), get_name('b1'), get_name('b2')],
                     out)
import pytest

# The original decorator is not shown; parametrizing over the two
# separators named in the docstring is an assumption.
@pytest.mark.parametrize("sep", [",", " "])
def test_build_stack_separator(sep):
    """We can use commas/spaces to separate entries"""
    import stk

    expected = ['a', 'a1', 'a2', 'b', 'b1', 'b2']
    out = stk.build(sep.join(expected))
    for outval, expval in zip(out, expected):
        assert outval == expval

    assert len(out) == len(expected)
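The separators the test exercises expand as follows (a hedged illustration; the output is inferred from the assertions above):

import stk

stk.build("a,a1,a2")    # -> ['a', 'a1', 'a2']
stk.build("a a1 a2")    # -> ['a', 'a1', 'a2']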
def test_build_stack2():
    """We can use @- syntax to read from a file"""
    import stk

    names = ['a', 'a1', 'a2', '@b.lis']
    out = stk.build('@-{}/{}'.format(_this_dir, 'a.lis'))
    for outval, expval in zip(out, names):
        assert outval == expval

    assert len(out) == len(names)
def test_build_stack():
    """We can use @+ syntax to read from a file"""
    import stk

    names = ['a', 'a1', 'a2', 'b', 'b1', 'b2']
    expected = [get_name(n) for n in names]
    out = stk.build('@+{}/{}'.format(_this_dir, 'a.lis'))
    for outval, expval in zip(out, expected):
        assert outval == expval

    assert len(out) == len(expected)
def test_build_stack_lgrid():
    """We can use lgrid syntax

    We assume rgrid, pgrid, and igrid work if this does.
    """
    import stk

    names = [10, 12, 14, 16, 18, 20]
    expected = [str(n) for n in names]
    out = stk.build('lgrid(10:20:2)')
    for outval, expval in zip(out, expected):
        assert outval == expval

    assert len(out) == len(expected)
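Taken together, these tests cover the main stk.build input syntaxes. A hedged summary, with the expansions inferred from the assertions rather than from the stk documentation:

import stk

stk.build("a,b,c")            # comma- or space-separated list
stk.build("@files.lis")       # one entry per line of files.lis
stk.build("@+dir/files.lis")  # as above, prefixing each entry with dir/
stk.build("@-dir/files.lis")  # as above, but nested '@name' entries are
                              # kept verbatim rather than expanded
stk.build("lgrid(10:20:2)")   # linear grid -> ['10', '12', ..., '20']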
def validate_hrc_bands(inputs):
    """Ensure bands are valid for HRC data.

    The current implementation will only ever return a singleton
    list; an error is raised if multiple distinct bands are input.
    """
    v3("Validating energy bands for HRC")

    # HRC data can use
    #    file name
    #    wide
    #    default
    #    ::enmono
    #    pilo:pihi:enmono
    #
    # where enmono is in keV and pilo/pihi are in channels.
    #
    out = []
    check = set()
    bandnum = 1
    for espec in stk.build(inputs):
        if os.path.exists(espec):
            lbl = "band{}".format(bandnum)
            bandnum += 1
            band = HRCWeightedEnergyBand(espec, lbl)
        else:
            espec = espec.replace(" ", "").lower()
            if espec == "default":
                espec = "wide"

            band = HRCEnergyBand(espec)

        if band.key in check:
            continue

        out.append(band)
        check.add(band.key)

    if len(out) > 1:
        raise ValueError(
            "Unable to set multiple bands with HRC data (bands={})".format(
                inputs))

    return out
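A hedged sketch of inputs this routine accepts; the band classes come from the surrounding module and the outcomes follow the logic above:

validate_hrc_bands("wide")           # -> a single HRCEnergyBand
validate_hrc_bands("default,wide")   # duplicates collapse to one band
validate_hrc_bands("::1.5")          # monochromatic 1.5 keV energy
validate_hrc_bands("wide,::1.5")     # -> ValueError (two distinct bands)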
def load_pha(self, id, arg=None, use_errors=False):
    if arg is None:
        id, arg = arg, id

    if id is not None:
        if self is DATASTACK:
            ui.load_pha(id, arg, use_errors)
            return
        else:
            raise AttributeError(
                "When called from a datastack instance, an ID cannot "
                "be provided to a load function ({})".format(id))

    # File stacks: if the file argument is a stack file, expand the
    # file and call this function for each file in the stack.
    try:
        files = stk.build(arg)
        for file in files:
            self._load_func(ui.load_pha, file, use_errors)
    except Exception:
        self._load_func(ui.load_pha, arg, use_errors)
def _load(self, *args, **kwargs):
    if len(args) == 1:
        id, arg = None, args[0]
        args = []
    elif len(args) > 1:
        id, arg = args[0], args[1]
        args = args[2:]

    if id is not None:
        if self is DATASTACK:
            self._load_func(load_func, id, arg, *args, **kwargs)
            return
        else:
            raise AttributeError(
                "When called from a datastack instance, an ID cannot "
                "be provided to a load function ({})".format(id))

    # File stacks: if the file argument is a stack file, expand the
    # file and call this function for each file in the stack.
    try:
        files = stk.build(arg)
        for file in files:
            self._load_func(load_func, file, *args, **kwargs)
    except Exception:
        self._load_func(load_func, arg, *args, **kwargs)
def _load(self, *args, **kwargs):
    if len(args) == 1:
        id, arg = None, args[0]
        args = []
    elif len(args) > 1:
        id, arg = args[0], args[1]
        args = args[2:]

    if id is not None:
        if self._default_instance:
            self._load_func(load_func, id, arg, *args, **kwargs)
            return
        else:
            raise AttributeError(load_error_msg(id))

    # File stacks: if the file argument is a stack file, expand
    # the file and call this function for each file in the stack.
    try:
        files = stk.build(arg)
        for file in files:
            self._load_func(load_func, file, *args, **kwargs)
    except Exception:
        self._load_func(load_func, arg, *args, **kwargs)
def validate_acis_bands(inputs):
    "Ensure bands are valid for ACIS data."

    v3("Validating energy bands for ACIS")

    # ACIS data can use
    #    file name
    #    default
    #    ultrasoft, soft, medium, hard, broad, csc
    #    elo:ehi:enmono
    #
    # We could do this in one loop, but split it out to make it a
    # bit clearer to read.
    #
    # A set would be the natural choice for especs, but the order
    # must be retained.
    especs = []

    def add_espec(espec):
        if espec not in especs:
            especs.append(espec)

    for espec in stk.build(inputs):
        if os.path.isfile(espec):
            add_espec((True, espec))
        else:
            espec = espec.replace(" ", "").lower()
            if espec == "default":
                add_espec((False, "broad"))
            elif espec == "csc":
                add_espec((False, "soft"))
                add_espec((False, "medium"))
                add_espec((False, "hard"))
            else:
                add_espec((False, espec))

    out = []
    check1 = set()
    check2 = {}
    bandnum = 1
    for (isfile, espec) in especs:
        if isfile:
            lbl = "band{}".format(bandnum)
            bandnum += 1
            band = ACISWeightedEnergyBand(espec, lbl)
        else:
            band = ACISEnergyBand(espec)

        if band.key in check1:
            continue

        # By checking on the user label we are really checking on
        # energy ranges when they are given explicitly: i.e.
        # 0.5:2:1,0.5:2:1.5 will be caught here, but 0.5:7:2,broad
        # will be allowed through.
        #
        try:
            espec2 = check2[band.bandlabel]
            raise ValueError(
                "Bands {} and {} have the same label ({}).".format(
                    espec, espec2, band.userlabel))
        except KeyError:
            pass

        out.append(band)
        check1.add(band.key)
        check2[band.bandlabel] = espec

    return out
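A hedged sketch of how some inputs expand; the band classes come from the surrounding module and the outcomes follow the logic above:

validate_acis_bands("default")            # -> one 'broad' band
validate_acis_bands("csc")                # -> soft, medium, and hard bands
validate_acis_bands("0.5:7.0:2.3")        # explicit elo:ehi:enmono (keV)
validate_acis_bands("0.5:2:1,0.5:2:1.5")  # -> ValueError: same band label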
def doit(ensemblestk, stackmapfile, outdir,
         svdqafile, centroidfile,
         mrgsrc3dir="",
         verbose=False):
    """Process the ensembles to find master hulls.

    If the output directory for an ensemble exists it is skipped.

    Parameters
    ----------
    ensemblestk : str
        The stack of ensembles - e.g. "ens00000500_001,..."
        or "@ensmap.dat".
    stackmapfile : str
        The file must have columns ensemble and stack, with one
        stack per line.
    outdir : str
        The name of the output directory; it will be created
        if it does not exist.
    svdqafile : str
        The name of the file containing the stack ids that went
        to SVD QA. The first column of this file is used as the
        stack id. The full path is written to the SVDQAFIL
        header keyword.
    centroidfile : str or None, optional
        If given, the name of the file containing the
        stack,cpt,include_centroid information (a partial list).
        Used to set up the USE_CEN column.
    mrgsrc3dir : str, optional
        Passed through as the --mrgsrc3dir value if not empty.
    verbose : bool, optional
        If set then display the command being run.

    """

    # Assume the directory name does not have leading or trailing
    # white space.
    #
    mrgsrc3dir = mrgsrc3dir.strip()
    if mrgsrc3dir != "" and not os.path.isdir(mrgsrc3dir):
        raise IOError("Unable to find " +
                      "mrgsrc3dir={}".format(mrgsrc3dir))

    # Look for the tool in the same directory as this script.
    dirname = os.path.dirname(__file__)
    toolname = os.path.join(dirname, "chs_create_initial_masters.py")
    if not os.path.exists(toolname):
        raise IOError("Unable to find '{}'".format(toolname))

    ensnames = stk.build(ensemblestk)
    ntot = len(ensnames)
    if ntot == 0:
        print("No ensembles found. That's surprising.")
        return

    if os.path.exists(outdir):
        if not os.path.isdir(outdir):
            raise IOError("outdir '{}' ".format(outdir) +
                          "is not a directory!")
    else:
        print("Creating: {}".format(outdir))
        os.mkdir(outdir)

    failed = []
    for i, ensname in enumerate(ensnames):
        sys.stdout.write("[{}/{}] {}\n".format(i + 1, ntot, ensname))
        sys.stdout.flush()

        dirname = os.path.join(outdir, ensname)
        if os.path.exists(dirname):
            print(" - skipping as {} exists".format(dirname))
            if not os.path.isdir(dirname):
                print(" WARNING: not a directory!")
            continue

        logfile = os.path.join(outdir, 'log.' + ensname)
        if os.path.exists(logfile):
            os.remove(logfile)

        args = ['python', toolname, svdqafile, centroidfile,
                stackmapfile, ensname, dirname]
        if mrgsrc3dir != "":
            args.extend(["--mrgsrc3dir", mrgsrc3dir])

        if verbose:
            # assume no protection/quoting is needed
            print(">> {}".format(" ".join(args)))

        try:
            out = check_output(args, stderr=STDOUT)
            if not six.PY2:
                out = out.decode('ascii')
        except CalledProcessError as exc:
            out = "ERROR: ensemble={}\n{}\n\n".format(ensname, exc)
            if six.PY2:
                out += exc.output
            else:
                out += exc.output.decode('ascii')

            sys.stdout.write(" FAILED\n")
            sys.stdout.flush()
            failed.append(ensname)

        with open(logfile, 'w') as fh:
            fh.write(out)

    nfail = len(failed)
    if nfail == 0:
        print("All ran successfully.")
        return

    print("")
    if nfail == ntot:
        print("*** They ALL failed!\n")
    elif nfail == 1:
        print("*** There was one failure:")
    else:
        print("*** There were {} failures:".format(nfail))

    if nfail != ntot:
        for i, ensname in enumerate(failed):
            print("  {}/{}  {}".format(i + 1, nfail, ensname))

    sys.exit(1)
parser.add_argument("outdir",
                    help="Files are written to this directory (must exist)")
parser.add_argument("names", nargs='?', default=None,
                    help="Restrict to these names (stack syntax)")

parser.add_argument("--debug", action="store_true",
                    help="Print out parsed output")
parser.add_argument("--sxml", action="store_true",
                    help="Use the SXML rather than AHELP DTD")
parser.add_argument("--models", action="store_true",
                    help="Restrict to Sherpa models only")

args = parser.parse_args(sys.argv[1:])

restrict = args.names
if restrict is not None:
    restrict = stk.build(restrict)

dtd = 'sxml' if args.sxml else 'ahelp'
convert(args.outdir, dtd=dtd, modelsonly=args.models,
        debug=args.debug, restrict=restrict)
fp = paramopen("xap.par", "wL", sys.argv)
evtfile = pget(fp, "infile")
outfile = pget(fp, "outfile")
breg = pget(fp, "breg")
srcstack = pget(fp, "srcstack")
psfstack = pget(fp, "psfstack")
expstack = pget(fp, "expstack")
CL_desired = pgetd(fp, "CL")
intenstack = pget(fp, "intenstack")
nmesh = pgeti(fp, "nmesh")
clob = pgetb(fp, "clobber")
verb = pgeti(fp, "verbose")
paramclose(fp)

sregs = stk.build("@" + srcstack)

# Read psfstack and test for null values
psfs = array([])
try:
    psfs = stk.build("@" + psfstack)
except Exception:
    print("Unable to find PSF files.\n"
          "Setting source region ecfs to 1.0\n"
          " and background ecf to 0.0\n")

# Read expstack and test for null values
exps = array([])
try:
    exps = stk.build("@" + expstack)
except Exception:
def expand_evtfiles_stack(instack, pattern="*evt*"):
    """Expand the instack input into an array of event files.

    For each element in the stack, check whether it is a file or a
    directory, ignoring it if it is neither.

    If it is a file, use a Data Model table open to see if the file
    can be found (so that Data Model filters are checked/handled
    properly).

    If it is a directory, look for the first match to

        <dirname>/repro/<pattern>
        <dirname>/primary/<pattern>
        <dirname>/<pattern>

    and add the results (there can be multiple matches). If the
    directory contains no matches then it is ignored.

    Returns an array of file names, which can be empty. Note that
    the routine does not check that each file is an event file; it
    just checks that it is a file.
    """

    v3("Expanding out the event file stack: {}".format(instack))
    out = []
    for elem in stk.build(instack):
        v3(" - verifying element {}".format(elem))
        if os.path.isdir(elem):
            v3("   a directory")
            match = []
            for dname in ["repro", "primary", ""]:
                dpath = os.path.join(elem, dname, pattern)
                match = glob.glob(dpath)
                if match != []:
                    break

            if match != []:
                v3("   -> {}".format(match))
                for mname in match:
                    v1("Found {}".format(mname))

                out.extend(match)
            else:
                v1("Skipping directory {} as no event files found "
                   "in it.".format(elem))

        else:
            # Try to cover common error cases - such as the input
            # being an image instead of a table - but it is hard to
            # tell between a missing file and an incorrect DM filter
            # without parsing the error messages from the Data Model,
            # which is fragile.
            #
            try:
                bl = cxcdm.dmTableOpen(elem)
                v3("   a table")
                cxcdm.dmTableClose(bl)
                out.append(elem)
            except IOError as ie:
                v3("Error message from table open: {}".format(ie))
                try:
                    bl = cxcdm.dmImageOpen(elem)
                    cxcdm.dmImageClose(bl)
                    v3("Oops, {} is an image".format(elem))
                    v1("Skipping {} as it is an image.".format(elem))
                except IOError:
                    # The input file might not exist, but it could
                    # also be an invalid virtual-file expression,
                    # such as
                    #    "evt2.fits[bob=10:20]"
                    #    "evt2.fits[cols sky,bob]"
                    # when the column bob does not exist.
                    #
                    v1("Skipping {} as it can not be opened.".format(elem))

    v3(" -> stack={}".format(out))
    return out
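A hedged usage sketch (the directory layout and file names are made up):

# A directory is searched for repro/primary event files; an explicit
# file may carry a Data Model filter, which dmTableOpen validates.
evtfiles = expand_evtfiles_stack("obs1234/,acisf01234_evt2.fits[ccd_id=7]")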
print("# Starting") coords = [] for ver, infile in store.values(): # original should be version=1 # this should be a per-hull value but currently it's per-ensemble changed = ver > 1 for hull, center, label in read_mhull(infile): coords.append({'changed': changed, 'label': label, 'ra': center[0], 'dec': center[1], 'coords': hull}) with open(outfile, 'w+') as fh: fh.write(json.dumps(coords)) print("Created: {}".format(outfile)) if __name__ == "__main__": import sys if len(sys.argv) != 3: sys.stderr.write("Usage: {} infiles outfile\n".format(sys.argv[0])) sys.exit(1) import stk infiles = stk.build(sys.argv[1]) process(infiles, sys.argv[2])
out[stack] = ans

# Should this get converted to ISO 8601 (or whatever) format?
out['lastupdate'] = time.asctime()

print(json.dumps(out))


def usage(progname):
    sys.stderr.write("Usage: {} stkfile props\n".format(progname))
    sys.exit(1)


if __name__ == "__main__":

    import sys
    import stk

    if len(sys.argv) != 3:
        usage(sys.argv[0])

    infile = sys.argv[1]
    props = stk.build(sys.argv[2])

    """
    for p in props:
        if p not in valid_props:
            raise ValueError("Invalid property '{}'".format(p))
    """

    find_stack_filenames(infile, props)
if __name__ == '__main__':
    from optparse import OptionParser

    parser = OptionParser(
        usage='%prog [options] <region_file> <evt2_file> <pbk_file> '
              '<asol_file> <msk_file>\n\n'
              'Arguments:\n'
              '  <region_file>  region file in CIAO format (may be list; '
              'if so, prepend filename with "@")\n'
              '  <evt2_file>    events file\n'
              '  <pbk_file>     pbk0 file\n'
              '  <asol_file>    asol1 file (may be list; if so, prepend '
              'filename with "@")\n'
              '  <msk_file>     msk1 file',
        version="%prog 0.5")
    parser.add_option('--root', dest='root',
                      help='root for output files; default = None',
                      metavar='VAL', default=None)
    parser.add_option('--bg_region', dest='bg_region',
                      help='background region file; default = None',
                      metavar='FILE', default=None)
    parser.add_option('--bg_file', dest='bg_file',
                      help='background fits file; default = None',
                      metavar='FILE', default=None)
    parser.add_option('--bin', dest='bin_cnts',
                      help='number of counts per spectral bin; default = None',
                      metavar='VAL', default=None)
    parser.add_option('--ncores', dest='ncores',
                      help='number of cores to use; default = all',
                      metavar='VAL', default=None)
    parser.add_option('-c', action='store_true', dest='clobber',
                      help='clobber any existing files', default=False)
    (options, args) = parser.parse_args()

    if len(args) == 5:
        # stk.build reads in a stack - e.g. a single value, or a
        # comma-separated list of names, or a filename with a leading
        # '@' - and returns a list of values.
        region_list = stk.build(args[0])
        evt2_file = args[1]
        pbk_file = args[2]
        asol_file = args[3]
        msk_file = args[4]

        root = options.root
        bg_file = options.bg_file
        bg_region = options.bg_region
        clobber = options.clobber
        binning = options.bin_cnts
        ncores = options.ncores

        # TODO: stack_to_list could be simplified by using stk.build,
        # but leave that for a later date.
        asol_list = stack_to_list(asol_file)
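The TODO above notes that stack_to_list could be simplified with stk.build. A minimal sketch of that idea; the real helper may do extra validation, so this is an assumption rather than the script's implementation:

import stk

def stack_to_list_simplified(infile):
    # Hypothetical replacement: stk.build already handles a single
    # name, a comma-separated list, and a leading-'@' stack file.
    return stk.build(infile)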