def _green_existing_locks():
    """Make locks created before monkey-patching safe.

    RLocks rely on a Lock and on Python 2, if an unpatched Lock blocks, it
    blocks the native thread. We need to replace these with green Locks.

    This was originally noticed in the stdlib logging module."""
    import gc
    import sys
    import threading
    import eventlet.green.thread
    lock_type = type(threading.Lock())
    rlock_type = type(threading.RLock())
    if sys.version_info[0] >= 3:
        pyrlock_type = type(threading._PyRLock())
    # We're monkey-patching so there can't be any greenlets yet, ergo our
    # thread ID is the only valid owner possible.
    tid = eventlet.green.thread.get_ident()
    for obj in gc.get_objects():
        if isinstance(obj, rlock_type):
            if (sys.version_info[0] == 2 and
                    isinstance(obj._RLock__block, lock_type)):
                _fix_py2_rlock(obj, tid)
            elif (sys.version_info[0] >= 3 and
                    not isinstance(obj, pyrlock_type)):
                _fix_py3_rlock(obj)
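# A minimal usage sketch, not part of the code above; the wrapper name below is
# invented for illustration and assumes eventlet is installed. The docstring
# points at the stdlib logging module: handlers allocate RLocks, so any handler
# created before eventlet.monkey_patch() holds an unpatched lock that a helper
# like _green_existing_locks() is meant to replace during patching.
def _example_pre_patch_locks():
    import logging
    import eventlet

    logging.basicConfig()      # logging handlers (and their RLocks) exist now
    eventlet.monkey_patch()    # patching should then green those existing locks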
def _fix_py3_rlock(old):
    import gc
    import threading
    new = threading._PyRLock()
    while old._is_owned():
        old.release()
        new.acquire()
    if old._is_owned():
        new.acquire()
    gc.collect()
    for ref in gc.get_referrers(old):
        for k, v in vars(ref).items():
            if v == old:
                setattr(ref, k, new)
def _fix_py3_rlock(old):
    import gc
    import threading
    new = threading._PyRLock()
    while old._is_owned():
        old.release()
        new.acquire()
    if old._is_owned():
        new.acquire()
    gc.collect()
    for ref in gc.get_referrers(old):
        try:
            ref_vars = vars(ref)
        except TypeError:
            pass
        else:
            for k, v in ref_vars.items():
                if v == old:
                    setattr(ref, k, new)
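# Why the try/except around vars(): gc.get_referrers() can return referrers
# that have no __dict__ (e.g. lists, dicts, frames), and vars() raises
# TypeError for those. A small standalone illustration follows; the names here
# are invented for the example.
def _example_referrers_without_dict():
    import gc
    import threading

    lock = threading.RLock()
    holder = [lock]                    # a list referrer with no __dict__
    for ref in gc.get_referrers(lock):
        try:
            vars(ref)
        except TypeError:
            pass                       # e.g. the 'holder' list lands here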
def __init__(self, name='scan0', arches=[], data_file=None,
             scan_data=pd.DataFrame(), mg_args={'wavelength': 1e-10},
             bai_1d_args={}, bai_2d_args={}, static=False, gi=False,
             th_mtr='th', overall_raw=0, single_img=False,
             global_mask=None, poni_dict={}):
    """name: string, name of sphere object.
    arches: list of EwaldArch objects, data to initialize with
    data_file: str, path to hdf5 file where data is stored
    scan_data: DataFrame, scan metadata
    mg_args: dict, arguments for Multigeometry. Must include at least
        'wavelength' attribute in Angstroems
    bai_1d_args: dict, arguments for the integrate1d method of pyFAI
        AzimuthalIntegrator
    bai_2d_args: dict, arguments for the integrate2d method of pyFAI
        AzimuthalIntegrator
    """
    super().__init__()
    self.file_lock = Condition()
    if name is None:
        self.name = os.path.split(data_file)[-1].split('.')[0]
    else:
        self.name = name
    if data_file is None:
        self.data_file = name + ".hdf5"
    else:
        self.data_file = data_file
    self.static = static
    self.gi = gi
    self.th_mtr = th_mtr
    self.single_img = single_img
    if arches:
        self.arches = ArchSeries(self.data_file, self.file_lock, arches,
                                 static=self.static, gi=self.gi)
    else:
        self.arches = ArchSeries(self.data_file, self.file_lock,
                                 static=self.static, gi=self.gi)
    self.scan_data = scan_data
    self.mg_args = mg_args
    self.multi_geo = MultiGeometry([a.integrator for a in arches], **mg_args)
    self.bai_1d_args = bai_1d_args
    self.bai_2d_args = bai_2d_args
    self.mgi_1d = int_1d_data()
    self.mgi_2d = int_2d_data()
    self.sphere_lock = Condition(_PyRLock())
    if self.static:
        self.bai_1d = int_1d_data_static()
        self.bai_2d = int_2d_data_static()
    else:
        self.bai_1d = int_1d_data()
        self.bai_2d = int_2d_data()
    self.overall_raw = overall_raw
    self.global_mask = global_mask
    self.poni_dict = poni_dict
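# A hedged construction sketch for the __init__ above. The snippet only shows
# the method, so 'sphere_cls' stands in for whatever class owns it; the
# bai_1d_args option 'npt' is just an example of a pyFAI integrate1d keyword,
# not a statement of what this package requires.
def _example_construct_sphere(sphere_cls):
    return sphere_cls(
        name='scan1',
        data_file='scan1.hdf5',
        mg_args={'wavelength': 1e-10},   # same value as the signature default
        bai_1d_args={'npt': 2000},       # forwarded to pyFAI integrate1d
        static=True,
    )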