def save_param_file (self,
    file_name,
    sources=None,
    extra_phil="",
    diff_only=False,
    save_state=False,
    replace_path=None) :
  if sources is None :
    sources = []
  if extra_phil != "" :
    self.merge_phil(phil_string=extra_phil, rebuild_index=False)
  final_phil = self.master_phil.fetch(sources=[self.working_phil] +
    list(sources))
  if diff_only :
    output_phil = self.master_phil.fetch_diff(source=final_phil)
  else :
    output_phil = final_phil
  if (replace_path is not None) :
    substitute_directory_name(
      phil_object=output_phil,
      path_name=replace_path,
      sub_name="LIBTBX_BASE_DIR")
  try :
    f = smart_open.for_writing(file_name, "w")
  except IOError, e :
    raise Sorry(str(e))
def save_param_file(self,
    file_name,
    sources=None,
    extra_phil="",
    diff_only=False,
    save_state=False,
    replace_path=None):
  if sources is None:
    sources = []
  if extra_phil != "":
    self.merge_phil(phil_string=extra_phil, rebuild_index=False)
  final_phil = self.master_phil.fetch(sources=[self.working_phil] +
    list(sources))
  if diff_only:
    output_phil = self.master_phil.fetch_diff(source=final_phil)
  else:
    output_phil = final_phil
  if (replace_path is not None):
    substitute_directory_name(phil_object=output_phil,
      path_name=replace_path,
      sub_name="LIBTBX_BASE_DIR")
  try:
    f = smart_open.for_writing(file_name, "w")
  except IOError, e:
    raise Sorry(str(e))
def save_param_file(self,
    file_name,
    sources=None,
    extra_phil="",
    diff_only=False,
    save_state=False,
    replace_path=None):
  if sources is None:
    sources = []
  if extra_phil != "":
    self.merge_phil(phil_string=extra_phil, rebuild_index=False)
  final_phil = self.master_phil.fetch(sources=[self.working_phil] +
    list(sources))
  if diff_only:
    output_phil = self.master_phil.fetch_diff(source=final_phil)
  else:
    output_phil = final_phil
  if (replace_path is not None):
    substitute_directory_name(phil_object=output_phil,
      path_name=replace_path,
      sub_name="LIBTBX_BASE_DIR")
  try:
    f = smart_open.for_writing(file_name, "w")
  except IOError as e:
    raise Sorry(str(e))
  else:
    if (replace_path is not None):
      f.write("LIBTBX_BASE_DIR = \"%s\"\n" % replace_path)
    output_phil.show(out=f)
    f.close()
  if save_state:
    cache_file = "%s_cache.pkl" % file_name
    easy_pickle.dump(cache_file, self)
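# All three save_param_file versions above hand the output path to
# libtbx.smart_open.for_writing, which returns a plain, gzip, or bz2 file
# object depending on the file-name extension.  A minimal sketch of that
# dispatch, as an illustration only (not the libtbx implementation):
import bz2
import gzip

def for_writing_sketch(file_name, mode="w", gzip_mode="wb"):
  # Compressed suffixes get a compressed stream; anything else is a plain open.
  if file_name.endswith(".gz"):
    return gzip.open(file_name, gzip_mode)
  elif file_name.endswith(".bz2"):
    return bz2.BZ2File(file_name, gzip_mode)
  return open(file_name, mode)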
def as_msgpack_file(self, filename):
  '''
  Write the reflection table to file in msgpack format
  '''
  from libtbx import smart_open
  with smart_open.for_writing(filename, 'wb') as outfile:
    outfile.write(self.as_msgpack())
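# Hedged usage sketch for as_msgpack_file above (assumes the DIALS
# flex.reflection_table API; the ".gz" suffix is compressed transparently
# by smart_open.for_writing):
from dials.array_family import flex

table = flex.reflection_table()
table['id'] = flex.int(10, 0)
table.as_msgpack_file("reflections.msgpack.gz")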
def to_smv_format_py(self,fileout,intfile_scale=0.0,debug_x=-1,debug_y=-1,
                     rotmat=False,extra=None,verbose=False,gz=False):
  byte_order = "little_endian";
  #recast the image file write to Python to afford extra options: rotmat, extra, gz
  if gz:
    from libtbx.smart_open import for_writing
    outfile = for_writing(file_name=fileout+".gz", gzip_mode="wb")
  else:
    outfile = open(fileout,"wb");
  outfile.write(("{\nHEADER_BYTES=1024;\nDIM=2;\nBYTE_ORDER=%s;\nTYPE=unsigned_short;\n"%byte_order).encode());
  outfile.write(b"SIZE1=%d;\nSIZE2=%d;\nPIXEL_SIZE=%g;\nDISTANCE=%g;\n"%(
    self.detpixels_fastslow[0],self.detpixels_fastslow[1],self.pixel_size_mm,self.distance_mm));
  outfile.write(b"WAVELENGTH=%g;\n"%self.wavelength_A);
  outfile.write(b"BEAM_CENTER_X=%g;\nBEAM_CENTER_Y=%g;\n"%self.beam_center_mm);
  outfile.write(b"ADXV_CENTER_X=%g;\nADXV_CENTER_Y=%g;\n"%self.adxv_beam_center_mm);
  outfile.write(b"MOSFLM_CENTER_X=%g;\nMOSFLM_CENTER_Y=%g;\n"%self.mosflm_beam_center_mm);
  outfile.write(b"DENZO_X_BEAM=%g;\nDENZO_Y_BEAM=%g;\n"%self.denzo_beam_center_mm);
  outfile.write(b"DIALS_ORIGIN=%g,%g,%g\n"%self.dials_origin_mm);
  outfile.write(b"XDS_ORGX=%g;\nXDS_ORGY=%g;\n"%self.XDS_ORGXY);
  outfile.write(b"CLOSE_DISTANCE=%g;\n"%self.close_distance_mm);
  outfile.write(b"PHI=%g;\nOSC_START=%g;\nOSC_RANGE=%g;\n"%(self.phi_deg,self.phi_deg,self.osc_deg));
  outfile.write(b"TIME=%g;\n"%self.exposure_s);
  outfile.write(b"TWOTHETA=%g;\n"%self.detector_twotheta_deg);
  outfile.write(b"DETECTOR_SN=000;\n");
  outfile.write(b"ADC_OFFSET=%g;\n"%self.adc_offset_adu);
  outfile.write(b"BEAMLINE=fake;\n");
  if rotmat:
    from scitbx.matrix import sqr
    RSABC = sqr(self.Amatrix).inverse().transpose()
    outfile.write( ("DIRECT_SPACE_ABC=%s;\n"%(",".join([repr(a) for a in RSABC.elems]))).encode() )
  if extra is not None:
    outfile.write(extra.encode())
  outfile.write(b"}\f");
  assert outfile.tell() < 1024, "SMV header too long, please edit this code and ask for more bytes."
  while ( outfile.tell() < 1024 ):
    outfile.write(b" ")
  from six import PY3
  if PY3:
    # Python3-compatible method for populating the output buffer.
    # Py2 implementation is more elegant in that the streambuf may be passed to C++,
    # and the data are gzipped in chunks (default 1024).  Py3 will not accept this method
    # as it is PyString-based, with no converter mechanisms to bring data into PyBytes.
    # The Py3 method brings the full data in one chunk into PyBytes and then populates
    # the output buffer in Python rather than C++.
    image_bytes = self.raw_pixels_unsigned_short_as_python_bytes(intfile_scale,debug_x,debug_y)
    ptr = 0; nbytes = len(image_bytes)
    while (ptr < nbytes):
      # chunked output necessary to prevent intermittent MemoryError
      outfile.write(image_bytes[ptr : min(ptr + 65536, nbytes)])
      ptr += 65536
    outfile.close();
    return
  from boost_adaptbx.boost.python import streambuf
  self.to_smv_format_streambuf(streambuf(outfile),intfile_scale,debug_x,debug_y)
  outfile.close();
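# Standalone sketch of the chunked-write pattern used in the PY3 branch above:
# slicing a large bytes payload into 64 KiB pieces before writing avoids the
# intermittent MemoryError noted in the original comment when pushing one huge
# buffer through a gzip-wrapped stream.  write_in_chunks is a hypothetical
# helper, not part of libtbx.
from libtbx import smart_open

def write_in_chunks(file_name, payload, chunk=65536):
  with smart_open.for_writing(file_name, "wb") as out:
    for ptr in range(0, len(payload), chunk):
      out.write(payload[ptr:ptr + chunk])

write_in_chunks("image.img.gz", b"\x00" * (1024 * 1024))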
def tst_dxtbx_compressed():
  import libtbx.load_env
  try:
    dials_regression = libtbx.env.dist_path('dials_regression')
  except KeyError:
    print 'FAIL: dials_regression not configured'
    return

  import os
  from dxtbx.format.Registry import Registry
  from dials_regression.image_examples.get_all_working_images import \
    get_all_working_images

  # test that reading gz or bz2 compressed files works: it doesn't!
  from libtbx import smart_open
  from libtbx.test_utils import open_tmp_directory
  import shutil

  tmp_dir = open_tmp_directory()
  print tmp_dir

  for directory, image in get_all_working_images():
    file_path = os.path.join(dials_regression, 'image_examples',
                             directory, image)
    for ext in ('.gz', '.bz2')[:]:
      compressed_path = os.path.join(tmp_dir, os.path.basename(file_path)) + ext
      with open(file_path, 'rb') as f_in, smart_open.for_writing(
          compressed_path) as f_out:
        shutil.copyfileobj(f_in, f_out)
      print file_path, compressed_path
      format = Registry.find(compressed_path)
      try:
        i = format(compressed_path)
      except Exception:
        print 'Error reading compressed file: %s' % compressed_path
        import traceback
        traceback.print_exc()
      else:
        print 'Successfully read compressed file: %s' % compressed_path
        det = i.get_detector()
        if det is not None:
          size = det[0].get_image_size()
        b = i.get_beam()
        g = i.get_goniometer()
        s = i.get_scan()
        try:
          d = i.get_raw_data()
        except IOError:
          pass

  print 'OK'
def as_pickle(self, filename):
  '''
  Write the reflection table as a pickle file.

  :param filename: The output filename
  '''
  import six.moves.cPickle as pickle
  from libtbx import smart_open

  with smart_open.for_writing(filename, 'wb') as outfile:
    pickle.dump(self, outfile, protocol=pickle.HIGHEST_PROTOCOL)
def as_pickle(self, filename):
  '''
  Write the reflection table as a pickle file.

  :param filename: The output filename
  '''
  import cPickle as pickle
  from libtbx import smart_open

  with smart_open.for_writing(filename, 'wb') as outfile:
    pickle.dump(self, outfile, protocol=pickle.HIGHEST_PROTOCOL)
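# Hedged sketch of the same write path as the two as_pickle variants above,
# using only the standard library plus smart_open: pickling an arbitrary
# object so that a ".gz" file name compresses the stream on the fly
# (hypothetical file name and payload):
import pickle
from libtbx import smart_open

payload = {"nref": 1234, "detector": "fake"}
with smart_open.for_writing("payload.pickle.gz", "wb") as outfile:
  pickle.dump(payload, outfile, protocol=pickle.HIGHEST_PROTOCOL)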
def as_json(self, filename=None, compact=False, n_digits=None):
  import json
  d = self.as_dict(n_digits=n_digits)
  if compact:
    text = json.dumps(d, separators=(',',':'), ensure_ascii=True)
  else:
    text = json.dumps(d, indent=2, ensure_ascii=True)
  if filename is not None:
    from libtbx import smart_open
    with smart_open.for_writing(filename) as f:
      f.write(text)
  else:
    return text
def smart_load_dictionary(name=None, file_path=None, url=None,
                          registry_location=cifdic_register_url,
                          save_local=False, store_dir=None):
  from iotbx import cif
  assert [name, file_path, url].count(None) < 3
  cif_dic = None
  if store_dir is None:
    store_dir = libtbx.env.under_dist(module_name='iotbx', path='cif/dictionaries')
  if name is not None and [file_path, url].count(None) == 2:
    if file_path is None:
      if os.path.isfile(name):
        file_path = name
      else:
        file_path = os.path.join(store_dir, name)
        if not os.path.isfile(file_path):
          gzip_path = file_path + '.gz'
          if os.path.isfile(gzip_path):
            if save_local:
              gz = smart_open.for_reading(gzip_path)
              f = smart_open.for_writing(file_path)
              shutil.copyfileobj(gz, f)
              gz.close()
              f.close()
            else:
              file_path = gzip_path
  if file_path is not None and os.path.isfile(file_path):
    file_object = smart_open.for_reading(file_path)
    cif_dic = dictionary(cif.reader(file_object=file_object).model())
    file_object.close()
  else:
    if url is None:
      url = locate_dictionary(name, registry_location=registry_location)
    file_object = urlopen(url)
    if save_local:
      if name is None:
        name = os.path.basename(url)
      f = open(os.path.join(store_dir, name), 'wb')
      shutil.copyfileobj(file_object, f)
      f.close()
      cif_dic = dictionary(
        cif.reader(file_path=os.path.join(store_dir, name)).model())
    else:
      cif_dic = dictionary(cif.reader(file_object=file_object).model())
  assert cif_dic is not None
  return cif_dic
def as_json(self, filename=None, compact=False):
  import json
  d = self.as_dict()
  if compact:
    text = json.dumps(d, separators=(",", ":"), ensure_ascii=True)
  else:
    text = json.dumps(d, indent=2, ensure_ascii=True)
  if filename is not None:
    from libtbx import smart_open
    with smart_open.for_writing(filename) as f:
      f.write(text)
  else:
    return text
def smart_load_dictionary(name=None, file_path=None, url=None,
                          registry_location=cifdic_register_url,
                          save_local=False, store_dir=None):
  from iotbx import cif
  assert [name, file_path, url].count(None) < 3
  cif_dic = None
  if store_dir is None:
    store_dir = libtbx.env.under_dist(
      module_name='iotbx', path='cif/dictionaries')
  if name is not None and [file_path, url].count(None) == 2:
    if file_path is None:
      if os.path.isfile(name):
        file_path = name
      else:
        file_path = os.path.join(store_dir, name)
        if not os.path.isfile(file_path):
          gzip_path = file_path + '.gz'
          if os.path.isfile(gzip_path):
            if save_local:
              gz = smart_open.for_reading(gzip_path)
              f = smart_open.for_writing(file_path)
              shutil.copyfileobj(gz, f)
              gz.close()
              f.close()
            else:
              file_path = gzip_path
  if file_path is not None and os.path.isfile(file_path):
    file_object = smart_open.for_reading(file_path)
    cif_dic = dictionary(cif.reader(file_object=file_object).model())
    file_object.close()
  else:
    if url is None:
      url = locate_dictionary(name, registry_location=registry_location)
    file_object = urlopen(url)
    if save_local:
      if name is None:
        name = os.path.basename(url)
      f = open(os.path.join(store_dir, name), 'wb')
      shutil.copyfileobj(file_object, f)
      f.close()
      cif_dic = dictionary(cif.reader(
        file_path=os.path.join(store_dir, name)).model())
    else:
      cif_dic = dictionary(cif.reader(
        file_object=file_object).model())
  assert cif_dic is not None
  return cif_dic
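# Hedged usage sketch for smart_load_dictionary as defined above: load a CIF
# dictionary by name.  cif_core.dic is assumed to ship with iotbx (a gzipped
# copy is picked up through smart_open when the plain file is absent); with
# save_local=True a downloaded dictionary is kept in the local store_dir.
cif_core = smart_load_dictionary(name='cif_core.dic', save_local=True)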
def test_dxtbx_compressed(dials_regression, tmpdir):
  from dxtbx.format.Registry import Registry
  from dials_regression.image_examples.get_all_working_images import \
    get_all_working_images

  # test that reading gz or bz2 compressed files works: it doesn't!
  from libtbx import smart_open
  import shutil

  tmpdir.chdir()

  for directory, image in get_all_working_images():
    file_path = os.path.join(dials_regression, 'image_examples',
                             directory, image)
    for ext in ('.gz', '.bz2')[:]:
      compressed_path = os.path.basename(file_path) + ext
      with open(file_path, 'rb') as f_in, smart_open.for_writing(
          compressed_path) as f_out:
        shutil.copyfileobj(f_in, f_out)
      print(file_path, compressed_path)
      format = Registry.find(compressed_path)
      try:
        i = format(compressed_path)
      except Exception:
        print('Error reading compressed file: %s' % compressed_path)
        import traceback
        traceback.print_exc()
      else:
        print('Successfully read compressed file: %s' % compressed_path)
        det = i.get_detector()
        if det is not None:
          size = det[0].get_image_size()
        b = i.get_beam()
        g = i.get_goniometer()
        s = i.get_scan()
        try:
          d = i.get_raw_data()
        except IOError:
          pass
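# The pytest-style test above expects "dials_regression" and "tmpdir" as
# fixtures; tmpdir is built into pytest.  A hedged conftest.py sketch for the
# dials_regression fixture, assuming the module is configured in the running
# libtbx environment (it mirrors the KeyError handling of tst_dxtbx_compressed):
import pytest

@pytest.fixture
def dials_regression():
  import libtbx.load_env
  try:
    return libtbx.env.dist_path('dials_regression')
  except KeyError:
    pytest.skip('dials_regression not configured')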
def to_smv_format_py(self,fileout,intfile_scale=0.0,debug_x=-1,debug_y=-1,
                     rotmat=False,extra=None,verbose=False,gz=False):
  byte_order = "little_endian";
  #recast the image file write to Python to afford extra options: rotmat, extra, gz
  if gz:
    from libtbx.smart_open import for_writing
    outfile = for_writing(file_name=fileout+".gz", gzip_mode="wb")
  else:
    outfile = open(fileout,"w");
  outfile.write("{\nHEADER_BYTES=1024;\nDIM=2;\nBYTE_ORDER=%s;\nTYPE=unsigned_short;\n"%byte_order);
  outfile.write("SIZE1=%d;\nSIZE2=%d;\nPIXEL_SIZE=%g;\nDISTANCE=%g;\n"%(
    self.detpixels_fastslow[0],self.detpixels_fastslow[1],self.pixel_size_mm,self.distance_mm));
  outfile.write("WAVELENGTH=%g;\n"%self.wavelength_A);
  outfile.write("BEAM_CENTER_X=%g;\nBEAM_CENTER_Y=%g;\n"%self.beam_center_mm);
  outfile.write("ADXV_CENTER_X=%g;\nADXV_CENTER_Y=%g;\n"%self.adxv_beam_center_mm);
  outfile.write("MOSFLM_CENTER_X=%g;\nMOSFLM_CENTER_Y=%g;\n"%self.mosflm_beam_center_mm);
  outfile.write("DENZO_X_BEAM=%g;\nDENZO_Y_BEAM=%g;\n"%self.denzo_beam_center_mm);
  outfile.write("DIALS_ORIGIN=%g,%g,%g\n"%self.dials_origin_mm);
  outfile.write("XDS_ORGX=%g;\nXDS_ORGY=%g;\n"%self.XDS_ORGXY);
  outfile.write("CLOSE_DISTANCE=%g;\n"%self.close_distance_mm);
  outfile.write("PHI=%g;\nOSC_START=%g;\nOSC_RANGE=%g;\n"%(self.phi_deg,self.phi_deg,self.osc_deg));
  outfile.write("TIME=%g;\n"%self.exposure_s);
  outfile.write("TWOTHETA=%g;\n"%self.detector_twotheta_deg);
  outfile.write("DETECTOR_SN=000;\n");
  outfile.write("ADC_OFFSET=%g;\n"%self.adc_offset_adu);
  outfile.write("BEAMLINE=fake;\n");
  if rotmat:
    from scitbx.matrix import sqr
    RSABC = sqr(self.Amatrix).inverse().transpose()
    outfile.write("DIRECT_SPACE_ABC=%s;\n"%(",".join([repr(a) for a in RSABC.elems])))
  if extra is not None:
    outfile.write(extra)
  outfile.write("}\f");
  assert outfile.tell() < 1024, "SMV header too long, please edit this code and ask for more bytes."
  while ( outfile.tell() < 1024 ):
    outfile.write(" ")
  from boost.python import streambuf
  self.to_smv_format_streambuf(streambuf(outfile),intfile_scale,debug_x,debug_y)
  outfile.close();
def as_json(self, filename=None, compact=False, n_digits=None, datablocks=None):
  import json
  d = self.as_dict(n_digits=n_digits)
  if datablocks:
    d['datablocks'] = [db.to_dict() for db in datablocks]
  if compact:
    text = json.dumps(d, separators=(',', ':'), ensure_ascii=True)
  else:
    text = json.dumps(d, separators=(',', ': '), indent=1, ensure_ascii=True)
  if filename is not None:
    from libtbx import smart_open
    with smart_open.for_writing(filename) as f:
      f.write(text)
  else:
    return text
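# The as_json variants above share one pattern: with filename=None the JSON
# text is returned, otherwise it is written through smart_open.for_writing,
# so a ".json.gz" name is compressed automatically.  Hedged sketch of that
# pattern on a plain dict, outside any model class (hypothetical file name
# and payload):
import json
from libtbx import smart_open

def dump_json(d, filename=None, compact=False):
  if compact:
    text = json.dumps(d, separators=(',', ':'), ensure_ascii=True)
  else:
    text = json.dumps(d, indent=2, ensure_ascii=True)
  if filename is None:
    return text
  with smart_open.for_writing(filename) as f:
    f.write(text)

dump_json({"beam": {"wavelength": 0.9795}}, filename="model.json.gz")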
# test that reading gz or bz2 compressed files works: it doesn't!
from libtbx import smart_open
from libtbx.test_utils import open_tmp_directory
import shutil

tmp_dir = open_tmp_directory()
print tmp_dir

for directory, image in get_all_working_images():
  file_path = os.path.join(dials_regression, 'image_examples',
                           directory, image)
  for ext in ('.gz', '.bz2')[:]:
    compressed_path = os.path.join(tmp_dir, os.path.basename(file_path)) + ext
    with open(file_path, 'rb') as f_in, smart_open.for_writing(compressed_path) as f_out:
      shutil.copyfileobj(f_in, f_out)
    print file_path, compressed_path
    format = Registry.find(compressed_path)
    try:
      i = format(compressed_path)
    except Exception, e:
      print 'Error reading compressed file: %s' %compressed_path
      import traceback
      traceback.print_exc()
    else:
      print 'Successfully read compressed file: %s' %compressed_path
      det = i.get_detector()
      if det is not None:
        size = det[0].get_image_size()
      b = i.get_beam()
# test that reading gz or bz2 compressed files works: it doesn't!
from libtbx import smart_open
from libtbx.test_utils import open_tmp_directory
import shutil

tmp_dir = open_tmp_directory()
print tmp_dir

for directory, image in get_all_working_images():
  file_path = os.path.join(dials_regression, 'image_examples',
                           directory, image)
  for ext in ('.gz', '.bz2')[:]:
    compressed_path = os.path.join(tmp_dir, os.path.basename(file_path)) + ext
    with open(file_path, 'rb') as f_in, smart_open.for_writing(
        compressed_path) as f_out:
      shutil.copyfileobj(f_in, f_out)
    print file_path, compressed_path
    format = Registry.find(compressed_path)
    try:
      i = format(compressed_path)
    except Exception, e:
      print 'Error reading compressed file: %s' % compressed_path
      import traceback
      traceback.print_exc()
    else:
      print 'Successfully read compressed file: %s' % compressed_path
      det = i.get_detector()
      if det is not None:
        size = det[0].get_image_size()
      b = i.get_beam()