def read_and_compress_table(parser, table, debug):
    """Read data from a FITS file and save it into a FITS binary table.

    The data are read from the FITS file specified in the "table" section
    of the "parser" object (an instance of ConfigurationParser). If "debug"
    is true, save additional information (useful for debugging) in the
    table.

    Return the new HDU holding the compressed samples, annotated with the
    PC* keywords that describe the compression.
    """
    input_file_name = os.path.normpath(parser.get(table, 'file'))
    with pyfits.open(input_file_name) as input_file:
        hdu, column = get_hdu_and_column_from_schema(parser, table,
                                                     input_file)
        compression = parser.get(table, 'compression')
        log.info('compressing file %s (HDU %s, column %s) '
                 'into table "%s", '
                 'compression is "%s"',
                 input_file_name, str(hdu), str(column), table, compression)
        samples_format = input_file[hdu].columns.formats[column]
        # The compression routines expect native-endian data, while FITS
        # files are big-endian on disk
        samples = ppc.to_native_endianness(input_file[hdu].data.field(column))

        # Optional per-table cast requested by the configuration file
        if parser.has_option(table, 'datatype'):
            samples = np.array(samples, dtype=parser.get(table, 'datatype'))

        cur_hdu, num_of_bytes, elapsed_time = compress_to_FITS_table(
            parser, table, samples_format, samples, debug)
        cur_hdu.name = table

        # Record the compression parameters and statistics in the header,
        # so the decompression step can reconstruct the original data
        cur_hdu.header['PCNUMSA'] = (len(samples),
                                     'Number of uncompressed samples')
        cur_hdu.header['PCCOMPR'] = (compression,
                                     'Polycomp compression algorithm')
        cur_hdu.header['PCSRCTP'] = (str(samples.dtype),
                                     'Original NumPy type of the data')
        cur_hdu.header['PCUNCSZ'] = (samples.itemsize * samples.size,
                                     'Size of the uncompressed data [bytes]')
        cur_hdu.header['PCCOMSZ'] = (num_of_bytes,
                                     'Size of the compressed data [bytes]')
        cur_hdu.header['PCTIME'] = (elapsed_time,
                                    'Time used for compression [s]')
        cr = float(cur_hdu.header['PCUNCSZ']) / float(
            cur_hdu.header['PCCOMSZ'])
        cur_hdu.header['PCCR'] = (cr, 'Compression ratio')
        log.info('table "%s" compressed, %s compressed to %s (cr: %.4f)',
                 table,
                 humanize_size(cur_hdu.header['PCUNCSZ']),
                 humanize_size(cur_hdu.header['PCCOMSZ']),
                 cr)

        return cur_hdu
def _write_compression_keywords(dest_hdu, table, compression, samples,
                                num_of_bytes, elapsed_time):
    """Annotate "dest_hdu" with the PC* keywords describing a compression run.

    The keywords record everything the decompression step needs (sample
    count, algorithm, original dtype) plus size/timing statistics.
    Return the compression ratio (uncompressed size / compressed size).
    """
    dest_hdu.header['PCNUMSA'] = (len(samples),
                                  'Number of uncompressed samples')
    dest_hdu.header['PCCOMPR'] = (compression,
                                  'Polycomp compression algorithm')
    dest_hdu.header['PCSRCTP'] = (str(samples.dtype),
                                  'Original NumPy type of the data')
    dest_hdu.header['PCUNCSZ'] = (samples.itemsize * samples.size,
                                  'Size of the uncompressed data [bytes]')
    dest_hdu.header['PCCOMSZ'] = (num_of_bytes,
                                  'Size of the compressed data [bytes]')
    dest_hdu.header['PCTIME'] = (elapsed_time,
                                 'Time used for compression [s]')
    cr = float(dest_hdu.header['PCUNCSZ']) / float(
        dest_hdu.header['PCCOMSZ'])
    dest_hdu.header['PCCR'] = (cr, 'Compression ratio')
    return cr


def read_and_compress_table(parser, table, debug):
    """Read data from a FITS file and save it into a FITS binary table.

    The data are read from the FITS file specified in the "table" section
    of the "parser" object (an instance of ConfigurationParser). If "debug"
    is true, save additional information (useful for debugging) in the
    table.

    Return the new HDU holding the compressed samples.
    """
    input_file_name = os.path.normpath(parser.get(table, 'file'))
    with pyfits.open(input_file_name) as input_file:
        hdu, column = get_hdu_and_column_from_schema(parser, table,
                                                     input_file)
        compression = parser.get(table, 'compression')
        log.info(
            'compressing file %s (HDU %s, column %s) '
            'into table "%s", '
            'compression is "%s"',
            input_file_name, str(hdu), str(column), table, compression)
        samples_format = input_file[hdu].columns.formats[column]
        # Compression code works on native-endian buffers only
        samples = ppc.to_native_endianness(input_file[hdu].data.field(column))

        # Honour an explicit dtype override from the configuration file
        if parser.has_option(table, 'datatype'):
            samples = np.array(samples, dtype=parser.get(table, 'datatype'))

        cur_hdu, num_of_bytes, elapsed_time = compress_to_FITS_table(
            parser, table, samples_format, samples, debug)
        cur_hdu.name = table
        cr = _write_compression_keywords(cur_hdu, table, compression,
                                         samples, num_of_bytes, elapsed_time)

        log.info('table "%s" compressed, %s compressed to %s (cr: %.4f)',
                 table,
                 humanize_size(cur_hdu.header['PCUNCSZ']),
                 humanize_size(cur_hdu.header['PCCOMSZ']),
                 cr)

        return cur_hdu
def decompress_quant(hdu):
    """Decompress an HDU containing quantized data.

    Return a pair (samples, fits_format), where "samples" is the array of
    decompressed values and "fits_format" is the FITS column format string
    matching their width ('1E' for 4-byte floats, '1D' for 8-byte floats).

    If the element size recorded in the HDU is not 4 or 8 bytes, log an
    error and terminate the process.
    """
    quant = ppc.QuantParams(element_size=hdu.header['PCELEMSZ'],
                            bits_per_sample=hdu.header['PCBITSPS'])
    quant.set_normalization(normalization=hdu.header['PCNORM'],
                            offset=hdu.header['PCOFS'])
    size_to_fits_fmt = {4: '1E', 8: '1D'}

    # Resolve the FITS format first, with a minimal "try" body: wrapping
    # the decompression call too would misreport any unrelated KeyError
    # (e.g. a missing PCNUMSA keyword) as an unsupported element size.
    try:
        fits_fmt = size_to_fits_fmt[quant.element_size()]
    except KeyError:
        log.error('unable to handle floating-point types which '
                  'are %d bytes wide, allowed sizes are %s',
                  quant.element_size(),
                  str(list(size_to_fits_fmt.keys())))
        sys.exit(1)

    # FITS data are big-endian on disk; the decompressor needs native order
    compr_samples = ppc.to_native_endianness(hdu.data.field(0))
    return (quant.decompress(compr_samples, hdu.header['PCNUMSA']),
            fits_fmt)
def decompress_quant(hdu):
    """Decompress an HDU containing quantized data.

    Return a tuple (samples, fits_format): the decompressed sample array
    and the FITS column format corresponding to its element width ('1E'
    for 4 bytes, '1D' for 8 bytes).

    Exit the process with an error message if the element size stored in
    the HDU header is unsupported.
    """
    quant = ppc.QuantParams(element_size=hdu.header['PCELEMSZ'],
                            bits_per_sample=hdu.header['PCBITSPS'])
    quant.set_normalization(normalization=hdu.header['PCNORM'],
                            offset=hdu.header['PCOFS'])
    size_to_fits_fmt = {4: '1E', 8: '1D'}

    # Keep the "try" body minimal: the original wrapped quant.decompress()
    # and the PCNUMSA header lookup as well, so any KeyError raised there
    # was wrongly reported as an unsupported element size before exiting.
    try:
        fits_fmt = size_to_fits_fmt[quant.element_size()]
    except KeyError:
        log.error(
            'unable to handle floating-point types which '
            'are %d bytes wide, allowed sizes are %s',
            quant.element_size(),
            str(list(size_to_fits_fmt.keys())))
        sys.exit(1)

    # Convert the on-disk (big-endian) buffer to native byte order
    compr_samples = ppc.to_native_endianness(hdu.data.field(0))
    samples = quant.decompress(compr_samples, hdu.header['PCNUMSA'])
    return (samples, fits_fmt)