def read_cbf_image(self, cbf_image):
    """Read a byte-offset compressed CBF image and return its pixel values.

    :param cbf_image: path to the CBF file (opened via self.open_file)
    :return: flex array of integer pixel values, shape (slow, fast)
    """
    from cbflib_adaptbx import uncompress
    import binascii

    # Magic marker separating the CBF text header from the binary section.
    start_tag = binascii.unhexlify("0c1a04d5")

    data = self.open_file(cbf_image, "rb").read()
    data_offset = data.find(start_tag) + 4
    cbf_header = data[: data_offset - 4]

    fast = 0
    slow = 0
    length = 0

    # Scan the MIME-style header for image dimensions and packed byte count.
    for record in cbf_header.split("\n"):
        if "X-Binary-Size-Fastest-Dimension" in record:
            fast = int(record.split()[-1])
        elif "X-Binary-Size-Second-Dimension" in record:
            slow = int(record.split()[-1])
        elif "X-Binary-Number-of-Elements" in record:
            length = int(record.split()[-1])
        elif "X-Binary-Size:" in record:
            size = int(record.split()[-1])

    assert length == fast * slow

    pixel_values = uncompress(
        packed=data[data_offset : data_offset + size], fast=fast, slow=slow
    )

    return pixel_values
def read_cbf_image_as_double(cbf_image):
    """Read a byte-offset compressed CBF image; return pixels as doubles.

    :param cbf_image: path to the CBF file
    :return: flex double array of pixel values
    """
    from cbflib_adaptbx import uncompress
    import binascii

    start_tag = binascii.unhexlify('0c1a04d5')

    # Close the file promptly instead of leaking the handle.
    with open(cbf_image, 'rb') as fh:
        data = fh.read()
    data_offset = data.find(start_tag) + 4
    cbf_header = data[:data_offset - 4]

    fast = 0
    slow = 0
    length = 0

    for record in cbf_header.split('\n'):
        if 'X-Binary-Size-Fastest-Dimension' in record:
            fast = int(record.split()[-1])
        elif 'X-Binary-Size-Second-Dimension' in record:
            slow = int(record.split()[-1])
        elif 'X-Binary-Number-of-Elements' in record:
            length = int(record.split()[-1])
        elif 'X-Binary-Size:' in record:
            size = int(record.split()[-1])

    assert length == fast * slow

    pixel_values = uncompress(packed=data[data_offset:data_offset + size],
                              fast=fast, slow=slow)

    return pixel_values.as_double()
def read_xds_calibration_file(calibration_file):
    '''Read XDS calibration file, return as flex array.'''
    # NOTE: dropped unused imports (flex, compress) present in earlier version.
    from cbflib_adaptbx import uncompress
    import binascii

    start_tag = binascii.unhexlify('0c1a04d5')

    # Use a context manager so the file handle is not leaked.
    with open(calibration_file, 'rb') as fh:
        data = fh.read()
    data_offset = data.find(start_tag) + 4
    cbf_header = data[:data_offset - 4]

    fast = 0
    slow = 0
    length = 0

    for record in cbf_header.split('\n'):
        if 'X-Binary-Size-Fastest-Dimension' in record:
            fast = int(record.split()[-1])
        elif 'X-Binary-Size-Second-Dimension' in record:
            slow = int(record.split()[-1])
        elif 'X-Binary-Number-of-Elements' in record:
            length = int(record.split()[-1])
        elif 'X-Binary-Size:' in record:
            size = int(record.split()[-1])

    assert length == fast * slow

    pixel_values = uncompress(packed=data[data_offset:data_offset + size],
                              fast=fast, slow=slow)

    return pixel_values
def read_cbf_image_as_double(cbf_image):
    """Read a byte-offset compressed CBF image; return pixels as doubles.

    :param cbf_image: path to the CBF file
    :return: flex double array of pixel values
    """
    from cbflib_adaptbx import uncompress
    import binascii

    start_tag = binascii.unhexlify('0c1a04d5')

    # Read under a context manager so the handle is closed (was leaked).
    with open(cbf_image, 'rb') as fh:
        data = fh.read()
    data_offset = data.find(start_tag) + 4
    cbf_header = data[:data_offset - 4]

    fast = 0
    slow = 0
    length = 0

    for record in cbf_header.split('\n'):
        if 'X-Binary-Size-Fastest-Dimension' in record:
            fast = int(record.split()[-1])
        elif 'X-Binary-Size-Second-Dimension' in record:
            slow = int(record.split()[-1])
        elif 'X-Binary-Number-of-Elements' in record:
            length = int(record.split()[-1])
        elif 'X-Binary-Size:' in record:
            size = int(record.split()[-1])

    assert length == fast * slow

    pixel_values = uncompress(packed=data[data_offset:data_offset + size],
                              fast=fast, slow=slow)

    return pixel_values.as_double()
def read_cbf_image(cbf_image):
    """Read a byte-offset compressed CBF image and return its pixel values.

    :param cbf_image: path to the CBF file
    :return: flex array of integer pixel values, shape (slow, fast)
    """
    from cbflib_adaptbx import uncompress
    import binascii

    start_tag = binascii.unhexlify("0c1a04d5")

    # Context manager closes the file (previously the handle was leaked).
    with open(cbf_image, "rb") as fh:
        data = fh.read()
    data_offset = data.find(start_tag) + 4
    cbf_header = data[: data_offset - 4]

    fast = 0
    slow = 0
    length = 0

    for record in cbf_header.split("\n"):
        if "X-Binary-Size-Fastest-Dimension" in record:
            fast = int(record.split()[-1])
        elif "X-Binary-Size-Second-Dimension" in record:
            slow = int(record.split()[-1])
        elif "X-Binary-Number-of-Elements" in record:
            length = int(record.split()[-1])
        elif "X-Binary-Size:" in record:
            size = int(record.split()[-1])

    assert length == fast * slow

    pixel_values = uncompress(
        packed=data[data_offset : data_offset + size], fast=fast, slow=slow
    )
    return pixel_values
def get_raw_data_from_file(imageset, i):
    """Use cbflib_adaptbx directly to access the raw data array rather than
    through the imageset, in order to work for multi-panel detectors and other
    situations where the format class modifies the raw array

    :param imageset: imageset providing get_image_identifier()
    :param i: image index within the imageset
    :return: 1-tuple containing the flex array of pixel values
    """
    from cbflib_adaptbx import uncompress
    import binascii

    file_name = imageset.get_image_identifier(i)
    with open(file_name, "rb") as cbf:
        data = cbf.read()

    start_tag = binascii.unhexlify("0c1a04d5")
    data_offset = data.find(start_tag) + 4
    cbf_header = data[:data_offset - 4]

    fast = slow = length = 0
    for record in cbf_header.split("\n"):
        if "X-Binary-Size-Fastest-Dimension" in record:
            fast = int(record.split()[-1])
        elif "X-Binary-Size-Second-Dimension" in record:
            slow = int(record.split()[-1])
        elif "X-Binary-Size:" in record:
            # number of bytes of packed binary data
            length = int(record.split()[-1])

    values = uncompress(packed=data[data_offset:data_offset + length],
                        fast=fast, slow=slow)

    return (values, )
def _read_cbf_image(self):
    """Decode and return this image's pixel data as a flex array.

    Supports byte_offset-packed and uncompressed binary sections; any
    other packing raises ValueError.
    """
    from cbflib_adaptbx import uncompress

    marker = binascii.unhexlify("0c1a04d5")

    with self.open_file(self._image_file, "rb") as fh:
        raw = fh.read()
    offset = raw.find(marker) + 4
    header = self._parse_cbf_header(
        raw[: offset - 4].decode("ascii", "ignore")
    )

    if header["byte_offset"]:
        # Standard CBF byte-offset packing: decode via cbflib.
        return uncompress(
            packed=raw[offset : offset + header["size"]],
            fast=header["fast"],
            slow=header["slow"],
        )

    if header["no_compression"]:
        from boost.python import streambuf
        from dxtbx import read_int32
        from scitbx.array_family import flex

        # Raw int32 stream only makes sense for a single-panel detector.
        assert len(self.get_detector()) == 1
        with self.open_file(self._image_file) as f:
            f.read(offset)  # skip header plus start tag
            pixels = read_int32(streambuf(f), header["length"])
        pixels.reshape(flex.grid(header["slow"], header["fast"]))
        return pixels

    raise ValueError(
        "Compression of type other than byte_offset or none is not supported (contact authors)"
    )
def read_xds_calibration_file(calibration_file):
    """Read XDS calibration file, return as flex array."""
    # NOTE: dropped unused imports (flex, compress) from the original.
    from cbflib_adaptbx import uncompress
    import binascii

    start_tag = binascii.unhexlify("0c1a04d5")

    # Context manager closes the file (previously the handle was leaked).
    with open(calibration_file, "rb") as fh:
        data = fh.read()
    data_offset = data.find(start_tag) + 4
    cbf_header = data[:data_offset - 4]

    fast = 0
    slow = 0
    length = 0

    for record in cbf_header.split("\n"):
        if "X-Binary-Size-Fastest-Dimension" in record:
            fast = int(record.split()[-1])
        elif "X-Binary-Size-Second-Dimension" in record:
            slow = int(record.split()[-1])
        elif "X-Binary-Number-of-Elements" in record:
            length = int(record.split()[-1])

    assert length == fast * slow

    # No explicit packed size in this header style: decode from the data
    # offset to end of file.
    pixel_values = uncompress(packed=data[data_offset:], fast=fast, slow=slow)

    return pixel_values
def uncompress_in_place(self):
    """Ensure the stored pixel data is uncompressed, decoding if needed.

    Returns the (now uncompressed) data array.
    """
    if self.data_type == "compressed":
        # Decode once and replace the packed payload in place.
        self.data = uncompress(
            packed=self.data, fast=self.size_fast, slow=self.size_slow
        )
        self.data_type = "uncompressed"
    assert self.data_type == "uncompressed"
    return self.data
def uncompress_in_place(self):
    """Lazily decode the stored pixel data; afterwards it is guaranteed plain.

    Returns the uncompressed data array.
    """
    if self.data_type == "compressed":
        plain = uncompress(
            packed=self.data, fast=self.size_fast, slow=self.size_slow
        )
        self.data = plain
        self.data_type = "uncompressed"
    # Any data_type other than "uncompressed" at this point is a logic error.
    assert self.data_type == "uncompressed"
    return self.data
def recompute_BKGINIT(bkginit_in, init_lp, bkginit_out):
    """Patch negative background pixels in an XDS BKGINIT.cbf file.

    Negative pixels (outside the UNTRUSTED_RECTANGLEs listed in INIT.LP
    and a 5-pixel image border) are replaced by the mean of the positive
    values in their 5x5 neighbourhood; the result is written to
    bkginit_out with the original header.

    :param bkginit_in: path to the input BKGINIT.cbf
    :param init_lp: path to INIT.LP (source of UNTRUSTED_RECTANGLE records)
    :param bkginit_out: path for the patched output file
    """
    start_tag = binascii.unhexlify('0c1a04d5')

    with open(bkginit_in, 'rb') as fh:
        data = fh.read()
    data_offset = data.find(start_tag) + 4
    cbf_header = data[:data_offset - 4]

    fast = 0
    slow = 0
    length = 0

    for record in cbf_header.split('\n'):
        if 'X-Binary-Size-Fastest-Dimension' in record:
            fast = int(record.split()[-1])
        elif 'X-Binary-Size-Second-Dimension' in record:
            slow = int(record.split()[-1])
        elif 'X-Binary-Number-of-Elements' in record:
            length = int(record.split()[-1])

    assert length == fast * slow

    pixel_values = uncompress(packed=data[data_offset:], fast=fast, slow=slow)

    # BUG FIX: materialise each rectangle as a list.  The original stored a
    # bare map object, which under Python 3 is an iterator and is exhausted
    # after the first unpacking in the scan below.
    untrusted = []
    with open(init_lp) as lp:
        for record in lp:
            if 'UNTRUSTED_RECTANGLE=' in record:
                untrusted.append(
                    [int(t) for t in record.replace('.', ' ').split()[1:5]])

    modified_pixel_values = copy.deepcopy(pixel_values)

    for s in range(5, slow - 5):
        y = s + 1  # XDS coordinates are 1-based
        for f in range(5, fast - 5):
            x = f + 1
            trusted = True
            for x0, x1, y0, y1 in untrusted:
                if (x >= x0) and (x <= x1) and (y >= y0) and (y <= y1):
                    trusted = False
                    break
            if trusted:
                pixel = pixel_values[s * fast + f]
                if pixel < 0:
                    # Mean of the positive pixels in the 5x5 neighbourhood.
                    pixels = []
                    for j in range(-2, 3):
                        for i in range(-2, 3):
                            p = pixel_values[(s + j) * fast + f + i]
                            if p > 0:
                                pixels.append(p)
                    modified_pixel_values[s * fast + f] = int(
                        sum(pixels) / len(pixels))

    with open(bkginit_out, 'wb') as fh:
        fh.write(cbf_header + start_tag + compress(modified_pixel_values))
    return
def read_cbf_image(self, cbf_image):
    """Read a CBF image, handling byte_offset or uncompressed binary data.

    :param cbf_image: path to the CBF file (opened via self.open_file)
    :return: flex array of integer pixel values, shape (slow, fast)
    :raises ValueError: if the binary section uses any other compression
    """
    from cbflib_adaptbx import uncompress
    import binascii

    start_tag = binascii.unhexlify("0c1a04d5")

    data = self.open_file(cbf_image, "rb").read()
    data_offset = data.find(start_tag) + 4
    cbf_header = data[:data_offset - 4]

    fast = 0
    slow = 0
    length = 0
    byte_offset = False
    no_compression = False

    for record in cbf_header.split("\n"):
        if "X-Binary-Size-Fastest-Dimension" in record:
            fast = int(record.split()[-1])
        elif "X-Binary-Size-Second-Dimension" in record:
            slow = int(record.split()[-1])
        elif "X-Binary-Number-of-Elements" in record:
            length = int(record.split()[-1])
        elif "X-Binary-Size:" in record:
            size = int(record.split()[-1])
        elif "conversions" in record:
            if "x-CBF_BYTE_OFFSET" in record:
                byte_offset = True
            elif "x-CBF_NONE" in record:
                no_compression = True

    assert length == fast * slow

    if byte_offset:
        pixel_values = uncompress(packed=data[data_offset:data_offset + size],
                                  fast=fast, slow=slow)
    elif no_compression:
        from boost.python import streambuf
        try:
            from dxtbx.ext import read_int32
        except ImportError:
            from dxtbx import read_int32
        from scitbx.array_family import flex

        assert len(self.get_detector()) == 1
        # BUG FIX: reopen the image that was passed in (the original used
        # self._image_file, silently ignoring the cbf_image argument), and
        # close the handle when done.
        with self.open_file(cbf_image) as f:
            f.read(data_offset)  # skip header plus start tag
            pixel_values = read_int32(streambuf(f), int(slow * fast))
        pixel_values.reshape(flex.grid(slow, fast))
    else:
        raise ValueError(
            "Uncompression of type other than byte_offset or none "
            " is not supported (contact authors)")

    return pixel_values
def recompute_BKGINIT(bkginit_in, init_lp, bkginit_out):
    """Patch negative background pixels in an XDS BKGINIT.cbf file.

    Negative pixels (outside the UNTRUSTED_RECTANGLEs listed in INIT.LP
    and a 5-pixel image border) are replaced by the mean of the positive
    values in their 5x5 neighbourhood; the result is written to
    bkginit_out with the original header.

    :param bkginit_in: path to the input BKGINIT.cbf
    :param init_lp: path to INIT.LP (source of UNTRUSTED_RECTANGLE records)
    :param bkginit_out: path for the patched output file
    """
    start_tag = binascii.unhexlify('0c1a04d5')

    with open(bkginit_in, 'rb') as fh:
        data = fh.read()
    data_offset = data.find(start_tag) + 4
    cbf_header = data[:data_offset - 4]

    fast = 0
    slow = 0
    length = 0

    for record in cbf_header.split('\n'):
        if 'X-Binary-Size-Fastest-Dimension' in record:
            fast = int(record.split()[-1])
        elif 'X-Binary-Size-Second-Dimension' in record:
            slow = int(record.split()[-1])
        elif 'X-Binary-Number-of-Elements' in record:
            length = int(record.split()[-1])

    assert length == fast * slow

    pixel_values = uncompress(packed=data[data_offset:], fast=fast, slow=slow)

    # BUG FIX: materialise each rectangle as a list.  The original stored a
    # bare map object, which under Python 3 is an iterator and is exhausted
    # after the first unpacking in the scan below.
    untrusted = []
    with open(init_lp) as lp:
        for record in lp:
            if 'UNTRUSTED_RECTANGLE=' in record:
                untrusted.append(
                    [int(t) for t in record.replace('.', ' ').split()[1:5]])

    modified_pixel_values = copy.deepcopy(pixel_values)

    for s in range(5, slow - 5):
        y = s + 1  # XDS coordinates are 1-based
        for f in range(5, fast - 5):
            x = f + 1
            trusted = True
            for x0, x1, y0, y1 in untrusted:
                if (x >= x0) and (x <= x1) and (y >= y0) and (y <= y1):
                    trusted = False
                    break
            if trusted:
                pixel = pixel_values[s * fast + f]
                if pixel < 0:
                    # Mean of the positive pixels in the 5x5 neighbourhood.
                    pixels = []
                    for j in range(-2, 3):
                        for i in range(-2, 3):
                            p = pixel_values[(s + j) * fast + f + i]
                            if p > 0:
                                pixels.append(p)
                    modified_pixel_values[s * fast + f] = int(
                        sum(pixels) / len(pixels))

    with open(bkginit_out, 'wb') as fh:
        fh.write(cbf_header + start_tag + compress(modified_pixel_values))
    return
def squishGain(cbf_file, out_name, force_gain=None):
    """Replace the gain map in a CBF gain file with a single flat value.

    :param cbf_file: open (binary) file object for the input CBF
    :param out_name: path for the modified output file
    :param force_gain: optional gain to impose; if falsy, the mean of the
        positive pixel values is used (close to 1000x the "MEAN GAIN
        VALUE" reported in INIT.LP)
    """
    start_tag = binascii.unhexlify("0c1a04d5")

    data = cbf_file.read()
    data_offset = data.find(start_tag) + 4
    cbf_header = data[:data_offset - 4]

    fast = 0
    slow = 0
    length = 0

    for record in cbf_header.split("\n"):
        if "X-Binary-Size-Fastest-Dimension" in record:
            fast = int(record.split()[-1])
        elif "X-Binary-Size-Second-Dimension" in record:
            slow = int(record.split()[-1])
        elif "X-Binary-Size:" in record:
            # keep the raw record so the header can be patched below
            xbsize_record = record
            length = int(record.split()[-1])

    values = uncompress(packed=data[data_offset:data_offset + length],
                        fast=fast, slow=slow)

    # remainder of the file, contains another CIF-BINARY-FORMAT-SECTION that looks
    # like just zero padding.
    tail = data[data_offset + length:]

    modified = copy.deepcopy(values)
    if force_gain:
        new_val = int(1000 * force_gain)
    else:
        # calculate the mean of values that are greater than zero. This is close
        # to 1000 times the "MEAN GAIN VALUE" reported in INIT.LP
        dval1d = modified.as_1d().as_double()
        mval = flex.mean(dval1d.select(dval1d > 0))
        new_val = int(mval)

    # Set this value everywhere that is not a masked value marked by -3
    print("Setting flat gain of {0}".format(new_val / 1000.0))
    modified.set_selected(modified >= 0, new_val)

    # Compress the data
    compressed = compress(modified)
    nbytes = len(compressed)

    # Update the header with the new packed size, preserving line endings
    pre, post = cbf_header.split(xbsize_record)
    new_xbsize_record = "X-Binary-Size:{0:10d}".format(nbytes)
    if xbsize_record.endswith("\r"):
        new_xbsize_record += "\r"
    new_cbf_header = pre + new_xbsize_record + post

    # Write the file out, closing the handle promptly (was leaked before).
    with open(out_name, "wb") as fh:
        fh.write(new_cbf_header + start_tag + compressed + tail)
def multiplyCounts(cbf_file, out_name, multiplier):
    """Scale all positive pixel counts in a CBF image by a constant factor.

    :param cbf_file: open (binary) file object for the input CBF
    :param out_name: path for the modified output file
    :param multiplier: factor applied to every pixel value > 0
    """
    start_tag = binascii.unhexlify("0c1a04d5")

    data = cbf_file.read()
    data_offset = data.find(start_tag) + 4
    cbf_header = data[:data_offset - 4]

    fast = 0
    slow = 0
    length = 0

    for record in cbf_header.split("\n"):
        if "X-Binary-Size-Fastest-Dimension" in record:
            fast = int(record.split()[-1])
        elif "X-Binary-Size-Second-Dimension" in record:
            slow = int(record.split()[-1])
        elif "X-Binary-Size:" in record:
            # keep the raw record so the header can be patched below
            xbsize_record = record
            length = int(record.split()[-1])

    values = uncompress(packed=data[data_offset:data_offset + length],
                        fast=fast, slow=slow)

    # remainder of the file, contains another CIF-BINARY-FORMAT-SECTION that looks
    # like just zero padding.
    tail = data[data_offset + length:]

    # multiply all positive values
    modified = copy.deepcopy(values).as_1d()
    sel = modified > 0
    new_val = modified.select(sel) * multiplier
    modified.set_selected(sel, new_val)

    # reshape back to the original 2D accessor
    modified.reshape(values.accessor())

    # Compress the data
    compressed = compress(modified)
    nbytes = len(compressed)

    # Update the header with the new packed size, preserving line endings
    pre, post = cbf_header.split(xbsize_record)
    new_xbsize_record = "X-Binary-Size:{0:10d}".format(nbytes)
    if xbsize_record.endswith("\r"):
        new_xbsize_record += "\r"
    new_cbf_header = pre + new_xbsize_record + post

    # Write the file out, closing the handle promptly (was leaked before).
    with open(out_name, "wb") as fh:
        fh.write(new_cbf_header + start_tag + compressed + tail)
def read_cbf_image(cbf_image):
    """Decode a byte-offset packed CBF image and return its pixel values."""
    magic = binascii.unhexlify("0c1a04d5")
    with open(cbf_image, "rb") as fh:
        raw = fh.read()
    offset = raw.find(magic) + 4
    # Let the dxtbx format machinery parse the MIME-style text header.
    header = dxtbx.format.FormatCBF.FormatCBF._parse_cbf_header(
        raw[: offset - 4].decode("ascii", "ignore")
    )
    return uncompress(
        packed=raw[offset : offset + header["size"]],
        fast=header["fast"],
        slow=header["slow"],
    )
def _read_cbf_image(self):
    """Decode this image's byte-offset packed pixel data into a flex array."""
    magic = binascii.unhexlify("0c1a04d5")
    with self.open_file(self._image_file, "rb") as fh:
        raw = fh.read()
    offset = raw.find(magic) + 4
    # Dimensions and packed size come from the parsed text header.
    header = self._parse_cbf_header(raw[: offset - 4].decode("ascii", "ignore"))
    return uncompress(
        packed=raw[offset : offset + header["size"]],
        fast=header["fast"],
        slow=header["slow"],
    )
def basic_tests(verbose=True):
    """Round-trip random data through compress()/uncompress() and verify it.

    Plants deltas at the signed 8/16/32-bit boundaries to exercise every
    encoding width of the byte-offset algorithm.

    :param verbose: print sizes and profile the compress/uncompress calls
    """
    initial_intdata = create_random_data_with_gaussian_distribution(0.0, 100.0)

    # special deltas to test the compression algorithm
    # (Python 3 fix: print() function and range() instead of
    #  print statement / xrange, matching the sibling implementation.)
    addresses = [3, 6, 9, 12, 15, 18]
    deltas = [-127, 128, -32767, 32768, -2147483647, 2147483647]
    for x in range(6):
        initial_intdata[addresses[x] - 1] = 0
        initial_intdata[addresses[x]] = deltas[x]

    if verbose:
        P = Profiler("compress")
    array_shape = initial_intdata.focus()
    if verbose:
        print(array_shape)
    compressed = compress(initial_intdata)
    if verbose:
        print(len(compressed))
    if verbose:
        P = Profiler("uncompress")
    decompressed_dat = uncompress(packed=compressed, fast=array_shape[1],
                                  slow=array_shape[0])
    if verbose:
        del P
    assert assert_equal(initial_intdata, decompressed_dat)
def basic_tests(verbose=True):
    """Round-trip random data through compress()/uncompress() and verify it.

    Plants deltas at the signed 8/16/32-bit boundaries to exercise every
    encoding width of the byte-offset algorithm.
    """
    image = create_random_data_with_gaussian_distribution(0.0, 100.0)

    # special deltas to test the compression algorithm
    probe_addresses = [3, 6, 9, 12, 15, 18]
    probe_deltas = [-127, 128, -32767, 32768, -2147483647, 2147483647]
    for address, delta in zip(probe_addresses, probe_deltas):
        image[address - 1] = 0
        image[address] = delta

    if verbose:
        P = Profiler("compress")
    shape = image.focus()
    if verbose:
        print(shape)
    packed = compress(image)
    if verbose:
        print(len(packed))
    if verbose:
        P = Profiler("uncompress")
    restored = uncompress(packed=packed, fast=shape[1], slow=shape[0])
    if verbose:
        del P
    assert assert_equal(image, restored)