def get_pha_data(arg, make_copy=True, use_background=False):
    """Read PHA (spectral) data from a file or an open PHA crate dataset.

    get_pha_data( filename [, make_copy=True [, use_background=False]])
    get_pha_data( PHACrate [, make_copy=True [, use_background=False]])

    Parameters
    ----------
    arg : str or PHACrateDataset
        The file name to open, or an already-open PHA crate dataset
        (in the latter case ``make_copy`` is forced to False).
    make_copy : bool, optional
        Whether the ``_try_col``/``_require_col`` helpers should copy
        column data out of the crate.
    use_background : bool, optional
        If True, prefer a block whose HDUCLAS2 keyword is 'BKG'
        (Chandra Level 3 PHA files store backgrounds this way).

    Returns
    -------
    (datasets, filename) : (list of dict, str)
        One dict per spectrum (one for Type I files, one per row for
        Type II files).

    Raises
    ------
    IOErr
        If the input is not a PHA dataset or the required CHANNEL and
        COUNTS/RATE columns are missing.
    """
    filename = ''
    close_dataset = False
    # Use isinstance rather than a type() equality check (PEP 8 idiom,
    # and it also accepts str subclasses).
    if isinstance(arg, str):
        phadataset = open_crate_dataset(
            arg, pycrates.phacratedataset.PHACrateDataset)
        if pycrates.is_pha(phadataset) != 1:
            raise IOErr('badfile', arg, "PHACrateDataset obj")
        filename = arg
        # We opened the dataset here, so we are responsible for closing it.
        close_dataset = True
    elif pycrates.is_pha(arg) == 1:
        phadataset = arg
        filename = arg.get_filename()
        make_copy = False
    else:
        raise IOErr('badfile', arg, "PHACrateDataset obj")

    def _block_is_spectrum(crate):
        """Return True if the block's HDUCLAS1 or HDUCLAS2 is 'SPECTRUM'.

        get_key() returns None for a missing keyword (the BKG loop
        below already relies on this), so guard before reading .value
        to avoid an AttributeError on files lacking these keywords.
        """
        for keyname in ('HDUCLAS1', 'HDUCLAS2'):
            key = crate.get_key(keyname)
            if key is not None and key.value == 'SPECTRUM':
                return True
        return False

    pha = _get_crate_by_blockname(phadataset, "SPECTRUM")
    if pha is None:
        # No block named SPECTRUM: fall back to the current block,
        # then to the first block, checking the HDUCLAS keywords.
        pha = phadataset.get_crate(phadataset.get_current_crate())
        if not _block_is_spectrum(pha):
            pha = phadataset.get_crate(1)
            if not _block_is_spectrum(pha):
                # If background maybe better to go on to next block?
                pha = None

    if use_background:
        # Used to read BKGs found in an additional block of
        # Chandra Level 3 PHA files. If several BKG blocks exist the
        # last one wins (loop does not break).
        for ii in range(phadataset.get_ncrates()):
            block = phadataset.get_crate(ii + 1)
            hduclas2 = block.get_key('HDUCLAS2')
            if hduclas2 is not None and hduclas2.value == 'BKG':
                pha = block

    if pha is None or pha.get_colnames() is None:
        raise IOErr('filenotfound', arg)

    # Keywords that are stripped from the returned header (they are
    # surfaced as explicit dict entries instead).
    keys = ['BACKFILE', 'ANCRFILE', 'RESPFILE',
            'BACKSCAL', 'AREASCAL', 'EXPOSURE']
    # Quantities that may be stored as either a keyword or a column.
    keys_or_cols = ['BACKSCAL', 'BACKSCUP', 'BACKSCDN', 'AREASCAL']

    datasets = []

    # Calling phadataset.is_pha_type1() is unreliable when
    # both TYPE:I and TYPE:II keywords are in the header.
    # Here, I instead test for a column, SPEC_NUM, that can
    # *only* be present in Type II. SMD 05/15/13
    if _try_col(pha, 'SPEC_NUM') is None:
        # --- Type I PHA: a single spectrum in this block ---
        data = {}

        # Keywords
        data['exposure'] = _try_key(pha, 'EXPOSURE', SherpaFloat)
        # data['poisserr'] = _try_key(pha, 'POISSERR', bool)
        data['backfile'] = _try_key(pha, 'BACKFILE')
        data['arffile'] = _try_key(pha, 'ANCRFILE')
        data['rmffile'] = _try_key(pha, 'RESPFILE')

        # Keywords or columns (keyword takes precedence)
        for name in keys_or_cols:
            key = name.lower()
            data[key] = _try_key(pha, name, SherpaFloat)
            if data[key] is None:
                data[key] = _try_col(pha, name, make_copy)

        data['header'] = _get_meta_data(pha)
        for key in keys:
            try:
                data['header'].pop(key)
            except KeyError:
                pass

        # Columns
        if not pha.column_exists('CHANNEL'):
            raise IOErr('reqcol', 'CHANNEL', filename)

        data['channel'] = _require_col(pha, 'CHANNEL', make_copy,
                                       fix_type=True)
        # Make sure channel numbers, not indices
        if int(data['channel'][0]) == 0 or \
                pha.get_column('CHANNEL').get_tlmin() == 0:
            data['channel'] = data['channel'] + 1

        data['counts'] = None
        if pha.column_exists('COUNTS'):
            data['counts'] = _require_col(pha, 'COUNTS', make_copy,
                                          fix_type=True)
        else:
            if not pha.column_exists('RATE'):
                raise IOErr('reqcol', 'COUNTS or RATE', filename)
            # Convert a RATE column into counts using the exposure time.
            data['counts'] = _require_col(pha, 'RATE', make_copy,
                                          fix_type=True) * data['exposure']

        data['staterror'] = _try_col(pha, 'STAT_ERR', make_copy)
        data['syserror'] = _try_col(pha, 'SYS_ERR', make_copy)
        data['background_up'] = _try_col(pha, 'BACKGROUND_UP', make_copy,
                                         fix_type=True)
        data['background_down'] = _try_col(pha, 'BACKGROUND_DOWN', make_copy,
                                           fix_type=True)
        data['bin_lo'] = _try_col(pha, 'BIN_LO', make_copy, fix_type=True)
        data['bin_hi'] = _try_col(pha, 'BIN_HI', make_copy, fix_type=True)
        data['grouping'] = _try_col(pha, 'GROUPING', make_copy)
        data['quality'] = _try_col(pha, 'QUALITY', make_copy)
        datasets.append(data)
    else:
        # Type 2 PHA file support: one spectrum per table row
        data = {}
        num = pha.get_nrows()

        # Keywords
        exposure = _try_key(pha, 'EXPOSURE', SherpaFloat)
        # poisserr = _try_key(pha, 'POISSERR', bool)
        backfile = _try_key(pha, 'BACKFILE')
        arffile = _try_key(pha, 'ANCRFILE')
        rmffile = _try_key(pha, 'RESPFILE')

        # Keywords or columns (keyword takes precedence)
        backscal = _try_key_list(pha, 'BACKSCAL', num)
        if backscal is None:
            backscal = _try_col_list(pha, 'BACKSCAL', num, make_copy)

        backscup = _try_key_list(pha, 'BACKSCUP', num)
        if backscup is None:
            backscup = _try_col_list(pha, 'BACKSCUP', num, make_copy)

        backscdn = _try_key_list(pha, 'BACKSCDN', num)
        if backscdn is None:
            backscdn = _try_col_list(pha, 'BACKSCDN', num, make_copy)

        areascal = _try_key_list(pha, 'AREASCAL', num)
        if areascal is None:
            areascal = _try_col_list(pha, 'AREASCAL', num, make_copy)

        # Columns
        if not pha.column_exists('CHANNEL'):
            raise IOErr('reqcol', 'CHANNEL', filename)

        channel = _require_col_list(pha, 'CHANNEL', num, make_copy,
                                    fix_type=True)
        # Make sure channel numbers, not indices
        for ii in range(num):
            if int(channel[ii][0]) == 0:
                channel[ii] += 1

        counts = None
        if pha.column_exists('COUNTS'):
            counts = _require_col_list(pha, 'COUNTS', num, make_copy,
                                       fix_type=True)
        else:
            if not pha.column_exists('RATE'):
                raise IOErr('reqcol', 'COUNTS or RATE', filename)
            counts = _require_col_list(pha, 'RATE', num, make_copy,
                                       fix_type=True) * exposure

        staterror = _try_col_list(pha, 'STAT_ERR', num, make_copy)
        syserror = _try_col_list(pha, 'SYS_ERR', num, make_copy)
        background_up = _try_col_list(pha, 'BACKGROUND_UP', num, make_copy,
                                      fix_type=True)
        background_down = _try_col_list(pha, 'BACKGROUND_DOWN', num,
                                        make_copy, fix_type=True)
        bin_lo = _try_col_list(pha, 'BIN_LO', num, make_copy, fix_type=True)
        bin_hi = _try_col_list(pha, 'BIN_HI', num, make_copy, fix_type=True)
        grouping = _try_col_list(pha, 'GROUPING', num, make_copy)
        quality = _try_col_list(pha, 'QUALITY', num, make_copy)

        # Grating order/part identifiers: keyword first, column fallback
        orders = _try_key_list(pha, 'TG_M', num)
        if orders is None:
            orders = _try_col_list(pha, 'TG_M', num, make_copy)

        parts = _try_key_list(pha, 'TG_PART', num)
        if parts is None:
            parts = _try_col_list(pha, 'TG_PART', num, make_copy)

        specnums = _try_col_list(pha, 'SPEC_NUM', num, make_copy)
        srcids = _try_col_list(pha, 'TG_SRCID', num, make_copy)

        # Iterate over all rows of channels, counts, errors, etc
        # Populate a list of dictionaries containing individual dataset info
        for (bscal, bscup, bscdn, arsc, chan, cnt, staterr, syserr,
             backup, backdown, binlo, binhi, grp, qual, ordr, prt,
             specnum, srcid) in izip(backscal, backscup, backscdn, areascal,
                                     channel, counts, staterror, syserror,
                                     background_up, background_down,
                                     bin_lo, bin_hi, grouping, quality,
                                     orders, parts, specnums, srcids):
            data = {}
            data['exposure'] = exposure
            # data['poisserr'] = poisserr
            data['backfile'] = backfile
            data['arffile'] = arffile
            data['rmffile'] = rmffile
            data['backscal'] = bscal
            data['backscup'] = bscup
            data['backscdn'] = bscdn
            data['areascal'] = arsc
            data['channel'] = chan
            data['counts'] = cnt
            data['staterror'] = staterr
            data['syserror'] = syserr
            data['background_up'] = backup
            data['background_down'] = backdown
            data['bin_lo'] = binlo
            data['bin_hi'] = binhi
            data['grouping'] = grp
            data['quality'] = qual
            data['header'] = _get_meta_data(pha)
            data['header']['TG_M'] = ordr
            data['header']['TG_PART'] = prt
            data['header']['SPEC_NUM'] = specnum
            data['header']['TG_SRCID'] = srcid
            for key in keys:
                try:
                    data['header'].pop(key)
                except KeyError:
                    pass
            datasets.append(data)

    if close_dataset:
        close_crate_dataset(phadataset)
    return datasets, filename
# NOTE(review): this is a second, re-formatted definition of get_pha_data;
# at import time it shadows the definition that appears earlier in this file.
def get_pha_data(arg, make_copy=True, use_background=False):
    """Read PHA (spectral) data via a pycrates PHACrateDataset.

    get_pha_data( filename [, make_copy=True [, use_background=False]])
    get_pha_data( PHACrate [, make_copy=True [, use_background=False]])

    Returns a ``(datasets, filename)`` pair, where ``datasets`` is a list
    of dicts — one per spectrum (Type I files give one dict; Type II files
    give one dict per table row). Raises IOErr if the input is not a PHA
    dataset or required columns (CHANNEL, COUNTS/RATE) are missing.
    """
    filename = ''
    close_dataset = False
    if type(arg) == str:
        phadataset = open_crate_dataset(
            arg, pycrates.phacratedataset.PHACrateDataset)
        if pycrates.is_pha(phadataset) != 1:
            raise IOErr('badfile', arg, "PHACrateDataset obj")
        filename = arg
        # Opened here, so it must be closed before returning.
        close_dataset = True
    elif pycrates.is_pha(arg) == 1:
        phadataset = arg
        filename = arg.get_filename()
        make_copy = False
    else:
        raise IOErr('badfile', arg, "PHACrateDataset obj")

    pha = _get_crate_by_blockname(phadataset, "SPECTRUM")
    if pha is None:
        # No block named SPECTRUM: try the current block, then block 1,
        # accepting either when HDUCLAS1 or HDUCLAS2 is 'SPECTRUM'.
        # NOTE(review): get_key() can return None for a missing keyword
        # (the BKG loop below guards for that), so .value here would raise
        # AttributeError on files lacking these keywords — confirm.
        pha = phadataset.get_crate(phadataset.get_current_crate())
        if (pha.get_key('HDUCLAS1').value == 'SPECTRUM' or
                pha.get_key('HDUCLAS2').value == 'SPECTRUM'):
            pass
        else:
            pha = phadataset.get_crate(1)
            if (pha.get_key('HDUCLAS1').value == 'SPECTRUM' or
                    pha.get_key('HDUCLAS2').value == 'SPECTRUM'):
                pass
            else:
                # If background maybe better to go on to next block?
                pha = None

    if use_background:
        # Used to read BKGs found in an additional block of
        # Chandra Level 3 PHA files. If several BKG blocks exist,
        # the last one wins (no break).
        for ii in range(phadataset.get_ncrates()):
            block = phadataset.get_crate(ii + 1)
            hduclas2 = block.get_key('HDUCLAS2')
            if hduclas2 is not None and hduclas2.value == 'BKG':
                pha = block

    if pha is None or pha.get_colnames() is None:
        raise IOErr('filenotfound', arg)

    # Keywords removed from the returned header dict; they are exposed
    # as dedicated entries of each dataset dict instead.
    keys = [
        'BACKFILE', 'ANCRFILE', 'RESPFILE', 'BACKSCAL', 'AREASCAL',
        'EXPOSURE'
    ]
    # Quantities that may be stored as either a keyword or a column.
    keys_or_cols = ['BACKSCAL', 'BACKSCUP', 'BACKSCDN', 'AREASCAL']

    datasets = []

    # Calling phadataset.is_pha_type1() is unreliable when
    # both TYPE:I and TYPE:II keywords are in the header.
    # Here, I instead test for a column, SPEC_NUM, that can
    # *only* be present in Type II. SMD 05/15/13
    if _try_col(pha, 'SPEC_NUM') is None:
        # --- Type I PHA: a single spectrum in this block ---
        data = {}

        # Keywords
        data['exposure'] = _try_key(pha, 'EXPOSURE', SherpaFloat)
        # data['poisserr'] = _try_key(pha, 'POISSERR', bool)
        data['backfile'] = _try_key(pha, 'BACKFILE')
        data['arffile'] = _try_key(pha, 'ANCRFILE')
        data['rmffile'] = _try_key(pha, 'RESPFILE')

        # Keywords or columns (keyword takes precedence)
        for name in keys_or_cols:
            key = name.lower()
            data[key] = _try_key(pha, name, SherpaFloat)
            if data[key] is None:
                data[key] = _try_col(pha, name, make_copy)

        data['header'] = _get_meta_data(pha)
        for key in keys:
            try:
                data['header'].pop(key)
            except KeyError:
                pass

        # Columns
        if not pha.column_exists('CHANNEL'):
            raise IOErr('reqcol', 'CHANNEL', filename)

        data['channel'] = _require_col(pha, 'CHANNEL', make_copy,
                                       fix_type=True)
        # Make sure channel numbers, not indices
        if int(data['channel'][0]) == 0 or pha.get_column(
                'CHANNEL').get_tlmin() == 0:
            data['channel'] = data['channel'] + 1

        data['counts'] = None
        if pha.column_exists('COUNTS'):
            data['counts'] = _require_col(pha, 'COUNTS', make_copy,
                                          fix_type=True)
        else:
            if not pha.column_exists('RATE'):
                raise IOErr('reqcol', 'COUNTS or RATE', filename)
            # RATE is converted to counts via the exposure time.
            data['counts'] = _require_col(
                pha, 'RATE', make_copy, fix_type=True) * data['exposure']

        data['staterror'] = _try_col(pha, 'STAT_ERR', make_copy)
        data['syserror'] = _try_col(pha, 'SYS_ERR', make_copy)
        data['background_up'] = _try_col(pha, 'BACKGROUND_UP', make_copy,
                                         fix_type=True)
        data['background_down'] = _try_col(pha, 'BACKGROUND_DOWN', make_copy,
                                           fix_type=True)
        data['bin_lo'] = _try_col(pha, 'BIN_LO', make_copy, fix_type=True)
        data['bin_hi'] = _try_col(pha, 'BIN_HI', make_copy, fix_type=True)
        data['grouping'] = _try_col(pha, 'GROUPING', make_copy)
        data['quality'] = _try_col(pha, 'QUALITY', make_copy)
        datasets.append(data)
    else:
        # Type 2 PHA file support: one spectrum per table row
        data = {}
        num = pha.get_nrows()

        # Keywords
        exposure = _try_key(pha, 'EXPOSURE', SherpaFloat)
        # poisserr = _try_key(pha, 'POISSERR', bool)
        backfile = _try_key(pha, 'BACKFILE')
        arffile = _try_key(pha, 'ANCRFILE')
        rmffile = _try_key(pha, 'RESPFILE')

        # Keywords or columns (keyword takes precedence)
        backscal = _try_key_list(pha, 'BACKSCAL', num)
        if backscal is None:
            backscal = _try_col_list(pha, 'BACKSCAL', num, make_copy)

        backscup = _try_key_list(pha, 'BACKSCUP', num)
        if backscup is None:
            backscup = _try_col_list(pha, 'BACKSCUP', num, make_copy)

        backscdn = _try_key_list(pha, 'BACKSCDN', num)
        if backscdn is None:
            backscdn = _try_col_list(pha, 'BACKSCDN', num, make_copy)

        areascal = _try_key_list(pha, 'AREASCAL', num)
        if areascal is None:
            areascal = _try_col_list(pha, 'AREASCAL', num, make_copy)

        # Columns
        if not pha.column_exists('CHANNEL'):
            raise IOErr('reqcol', 'CHANNEL', filename)

        channel = _require_col_list(pha, 'CHANNEL', num, make_copy,
                                    fix_type=True)
        # Make sure channel numbers, not indices
        for ii in range(num):
            if int(channel[ii][0]) == 0:
                channel[ii] += 1

        counts = None
        if pha.column_exists('COUNTS'):
            counts = _require_col_list(pha, 'COUNTS', num, make_copy,
                                       fix_type=True)
        else:
            if not pha.column_exists('RATE'):
                raise IOErr('reqcol', 'COUNTS or RATE', filename)
            counts = _require_col_list(
                pha, 'RATE', num, make_copy, fix_type=True) * exposure

        staterror = _try_col_list(pha, 'STAT_ERR', num, make_copy)
        syserror = _try_col_list(pha, 'SYS_ERR', num, make_copy)
        background_up = _try_col_list(pha, 'BACKGROUND_UP', num, make_copy,
                                      fix_type=True)
        background_down = _try_col_list(pha, 'BACKGROUND_DOWN', num,
                                        make_copy, fix_type=True)
        bin_lo = _try_col_list(pha, 'BIN_LO', num, make_copy, fix_type=True)
        bin_hi = _try_col_list(pha, 'BIN_HI', num, make_copy, fix_type=True)
        grouping = _try_col_list(pha, 'GROUPING', num, make_copy)
        quality = _try_col_list(pha, 'QUALITY', num, make_copy)

        # Grating order/part identifiers: keyword first, column fallback
        orders = _try_key_list(pha, 'TG_M', num)
        if orders is None:
            orders = _try_col_list(pha, 'TG_M', num, make_copy)

        parts = _try_key_list(pha, 'TG_PART', num)
        if parts is None:
            parts = _try_col_list(pha, 'TG_PART', num, make_copy)

        specnums = _try_col_list(pha, 'SPEC_NUM', num, make_copy)
        srcids = _try_col_list(pha, 'TG_SRCID', num, make_copy)

        # Iterate over all rows of channels, counts, errors, etc
        # Populate a list of dictionaries containing individual dataset info
        for (bscal, bscup, bscdn, arsc, chan, cnt, staterr, syserr, backup,
             backdown, binlo, binhi, grp, qual, ordr, prt, specnum,
             srcid) in izip(backscal, backscup, backscdn, areascal, channel,
                            counts, staterror, syserror, background_up,
                            background_down, bin_lo, bin_hi, grouping,
                            quality, orders, parts, specnums, srcids):
            data = {}
            data['exposure'] = exposure
            # data['poisserr'] = poisserr
            data['backfile'] = backfile
            data['arffile'] = arffile
            data['rmffile'] = rmffile
            data['backscal'] = bscal
            data['backscup'] = bscup
            data['backscdn'] = bscdn
            data['areascal'] = arsc
            data['channel'] = chan
            data['counts'] = cnt
            data['staterror'] = staterr
            data['syserror'] = syserr
            data['background_up'] = backup
            data['background_down'] = backdown
            data['bin_lo'] = binlo
            data['bin_hi'] = binhi
            data['grouping'] = grp
            data['quality'] = qual
            data['header'] = _get_meta_data(pha)
            data['header']['TG_M'] = ordr
            data['header']['TG_PART'] = prt
            data['header']['SPEC_NUM'] = specnum
            data['header']['TG_SRCID'] = srcid
            for key in keys:
                try:
                    data['header'].pop(key)
                except KeyError:
                    pass
            datasets.append(data)

    if close_dataset:
        close_crate_dataset(phadataset)
    return datasets, filename
# NOTE(review): third definition of get_pha_data in this file — it uses the
# legacy attribute-based pycrates API (pha.backscal, pha.spec_num,
# pycrates.dmSUCCESS) and shadows the earlier definitions at import time.
def get_pha_data(arg, make_copy=True, use_background=False):
    """Read PHA (spectral) data using the legacy pycrates PHACrate API.

    get_pha_data( filename [, make_copy=True [, use_background=False]])
    get_pha_data( PHACrate [, make_copy=True [, use_background=False]])

    Returns a ``(datasets, filename)`` pair, where ``datasets`` is a list
    of dicts — one for a Type I file, one per row for a Type II file.
    Raises IOErr if the input is not a PHA crate or required columns
    (CHANNEL, COUNTS/RATE) are missing.
    """
    filename = ''
    if type(arg) == str:
        #pha = pycrates.read_pha(arg, use_background)
        pha = _open_crate(pycrates.PHACrate, [arg, use_background])
        filename = arg

        # Make a copy of the data, since we don't know that pycrates will
        # do something sensible wrt reference counting
    elif pycrates.is_pha(arg) == pycrates.dmSUCCESS:
        pha = arg
        filename = arg.get_filename()
        make_copy=False
    else:
        raise IOErr('badfile', arg, "PHACrate obj")

    if pha is None or pha.get_colnames() is None:
        raise IOErr('filenotfound', arg)

    # Keywords removed from the returned header dict; they are exposed
    # as dedicated entries of each dataset dict instead.
    keys = ['BACKFILE','ANCRFILE','RESPFILE',
            'BACKSCAL','AREASCAL','EXPOSURE']

    datasets = []

    # Treat as Type I when the crate flags it as such, or when a Type II
    # file holds only a single SPEC_NUM entry.
    if (pha.pha1_type_flag or
            (not pha.pha1_type_flag and len(pha.spec_num.get_values())==1)):
        data = {}

        # Keywords
        data['exposure'] = _try_key(pha.exposure, SherpaFloat)
        #data['poisserr'] = _try_key(pha.poisserr, bool)
        data['backfile'] = _try_key(pha.backfile_key)
        data['arffile'] = _try_key(pha.ancrfile_key)
        data['rmffile'] = _try_key(pha.respfile_key)

        # Keywords or columns (keyword takes precedence)
        data['backscal'] = _try_key(pha.backscal_key, SherpaFloat)
        if data['backscal'] is None:
            data['backscal'] = _try_col(pha.backscal, make_copy)

        data['backscup'] = _try_key(pha.backscup_key, SherpaFloat)
        if data['backscup'] is None:
            data['backscup'] = _try_col(pha.backscup, make_copy)

        data['backscdn'] = _try_key(pha.backscdn_key, SherpaFloat)
        if data['backscdn'] is None:
            data['backscdn'] = _try_col(pha.backscdn, make_copy)

        data['areascal'] = _try_key(pha.areascal_key, SherpaFloat)
        if data['areascal'] is None:
            data['areascal'] = _try_col(pha.areascal, make_copy)

        data['header'] = _get_meta_data(pha)
        for key in keys:
            try:
                data['header'].pop(key)
            except KeyError:
                pass

        # Columns
        if pha.channel is None:
            raise IOErr('reqcol', 'CHANNEL', filename)

        data['channel'] = _require_col(pha.channel, make_copy, fix_type=True)
        # Make sure channel numbers, not indices
        if pha.channel.get_tlmin() == 0:
            data['channel'] = data['channel']+1

        data['counts'] = None
        if pha.counts is not None:
            data['counts'] = _require_col(pha.counts, make_copy,
                                          fix_type=True)
        else:
            if pha.rate is None:
                raise IOErr('reqcol', 'COUNTS or RATE', filename)
            # RATE is converted to counts via the exposure time.
            data['counts'] = _require_col(pha.rate, make_copy,
                                          fix_type=True)*data['exposure']

        data['staterror'] = _try_col(pha.stat_err, make_copy)
        data['syserror'] = _try_col(pha.sys_err, make_copy)
        data['background_up'] = _try_col(pha.background_up, make_copy,
                                         fix_type=True)
        data['background_down'] = _try_col(pha.background_down, make_copy,
                                           fix_type=True)
        data['bin_lo'] = _try_col(pha.bin_lo, make_copy,fix_type=True)
        data['bin_hi'] = _try_col(pha.bin_hi, make_copy,fix_type=True)
        data['grouping'] = _try_col(pha.grouping, make_copy)
        data['quality'] = _try_col(pha.quality, make_copy)

        datasets.append(data)
    else:
        # Type 2 PHA file support: one spectrum per table row
        data = {}
        num = pha.spec_num.get_nsets()

        # Keywords
        exposure = _try_key(pha.exposure, SherpaFloat)
        #poisserr = _try_key(pha.poisserr, bool)
        backfile = _try_key(pha.backfile_key)
        arffile = _try_key(pha.ancrfile_key)
        rmffile = _try_key(pha.respfile_key)

        # Keywords or columns.
        # NOTE(review): unlike the newer versions of this function, the
        # fallback tests `pha.backscal_key is None` rather than
        # `backscal is None` — verify this is intentional, since
        # _try_key_list could plausibly return None even when the key
        # attribute itself is not None.
        backscal = _try_key_list(pha.backscal_key, num)
        if pha.backscal_key is None:
            backscal = _try_col_list(pha.backscal, num, make_copy)

        backscup = _try_key_list(pha.backscup_key, num)
        if pha.backscup_key is None:
            backscup = _try_col_list(pha.backscup, num, make_copy)

        backscdn = _try_key_list(pha.backscdn_key, num)
        if pha.backscdn_key is None:
            backscdn = _try_col_list(pha.backscdn, num, make_copy)

        areascal = _try_key_list(pha.areascal_key, num)
        if pha.areascal_key is None:
            areascal = _try_col_list(pha.areascal, num, make_copy)

        # Columns
        if pha.channel is None:
            raise IOErr('reqcol', 'CHANNEL', filename)
        channel = _require_col_list(pha.channel, num, make_copy,
                                    fix_type=True)

        counts = None
        if pha.counts is not None:
            counts = _require_col_list(pha.counts, num, make_copy,
                                       fix_type=True)
        else:
            if pha.rate is None:
                raise IOErr('reqcol', 'COUNTS or RATE', filename)
            counts = _require_col_list(pha.rate, num, make_copy,
                                       fix_type=True) * exposure

        staterror = _try_col_list(pha.stat_err, num, make_copy)
        syserror = _try_col_list(pha.sys_err, num, make_copy)
        background_up = _try_col_list(pha.background_up, num, make_copy,
                                      fix_type=True)
        background_down = _try_col_list(pha.background_down, num, make_copy,
                                        fix_type=True)
        bin_lo = _try_col_list(pha.bin_lo, num, make_copy, fix_type=True)
        bin_hi = _try_col_list(pha.bin_hi, num, make_copy, fix_type=True)
        grouping = _try_col_list(pha.grouping, num, make_copy)
        quality = _try_col_list(pha.quality, num, make_copy)

        # Grating order/part identifiers: keyword first, column fallback
        orders = _try_key_list(pha.tg_m_key,num)
        if pha.tg_m_key is None:
            orders = _try_col_list(pha.tg_m, num, make_copy)

        parts = _try_key_list(pha.tg_part_key,num)
        if pha.tg_part_key is None:
            parts = _try_col_list(pha.tg_part, num, make_copy)

        specnums = _try_col_list(pha.spec_num, num, make_copy)
        srcids = _try_col_list(pha.tg_srcid, num, make_copy)

        # Iterate over all rows of channels, counts, errors, etc
        # Populate a list of dictionaries containing individual dataset info
        for (bscal, bscup, bscdn, arsc, chan, cnt, staterr, syserr,
             backup, backdown, binlo, binhi, grp, qual, ordr, prt,
             specnum, srcid) in izip(backscal, backscup, backscdn, areascal,
                                     channel, counts, staterror, syserror,
                                     background_up, background_down,
                                     bin_lo, bin_hi, grouping, quality,
                                     orders, parts, specnums, srcids):
            data = {}
            data['exposure'] = exposure
            #data['poisserr'] = poisserr
            data['backfile'] = backfile
            data['arffile'] = arffile
            data['rmffile'] = rmffile
            data['backscal'] = bscal
            data['backscup'] = bscup
            data['backscdn'] = bscdn
            data['areascal'] = arsc
            data['channel'] = chan
            data['counts'] = cnt
            data['staterror'] = staterr
            data['syserror'] = syserr
            data['background_up'] = backup
            data['background_down'] = backdown
            data['bin_lo'] = binlo
            data['bin_hi'] = binhi
            data['grouping'] = grp
            data['quality'] = qual
            data['header'] = _get_meta_data(pha)
            data['header']['TG_M'] = ordr
            data['header']['TG_PART'] = prt
            data['header']['SPEC_NUM'] = specnum
            data['header']['TG_SRCID'] = srcid
            for key in keys:
                try:
                    data['header'].pop(key)
                except KeyError:
                    pass
            datasets.append(data)

    return datasets, filename