# Assumes threading.Lock; get_logger, LogLevel, null_handle and ResourceHandle
# are provided elsewhere in the surrounding project.
from threading import Lock


class Resource:
    """A shared resource whose current holder can be preempted by a stronger request."""

    def __init__(self, name='Resource'):
        self._lock = Lock()
        self._log = get_logger(f'Resource [{name}]', LogLevel.DEBUG)
        self._current_priority = -1
        self._active_handle = null_handle

    def __enter__(self):
        return self._lock.__enter__()

    def __exit__(self, exc_type, exc_val, exc_tb):
        return self._lock.__exit__(exc_type, exc_val, exc_tb)

    def reset(self):
        """Interrupt the current holder (if any) and mark the resource as free."""
        self._log('Reset')
        with self._lock:
            handle, self._active_handle = self._active_handle, null_handle
            if handle:
                self._log('Interrupting active resource handle')
                handle.interrupt()
            self._current_priority = -1

    def request(self, with_priority=0, on_taken_away=None):
        """Request the resource.

        A request whose priority value is less than or equal to the current
        holder's preempts it (lower values win); otherwise ``null_handle`` is
        returned.
        """
        with self._lock:
            if not self._active_handle:
                self._log(f'create handle for priority {with_priority}')
                self._create_new_handle(with_priority, on_taken_away)
                return self._active_handle
            elif self._current_priority >= with_priority:
                self._log(
                    f'taking from lower prio owner (request: {with_priority}, holder: {self._current_priority})'
                )
                self._active_handle.interrupt()
                self._create_new_handle(with_priority, on_taken_away)
                return self._active_handle
            else:
                self._log(
                    f'failed to take resource (request: {with_priority}, holder: {self._current_priority})'
                )
                return null_handle

    def _create_new_handle(self, with_priority, on_taken_away):
        self._current_priority = with_priority
        self._active_handle = ResourceHandle(self)
        if on_taken_away:
            self._active_handle.on_interrupted.add(on_taken_away)

    def release(self, resource_handle):
        """Release the resource, but only if ``resource_handle`` is the current holder."""
        with self._lock:
            if self._active_handle == resource_handle:
                self._active_handle = null_handle
                self._current_priority = -1
                self._log('released')
            else:
                self._log('failed to release, not owned')
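# A minimal usage sketch for the Resource class above. It assumes that
# null_handle is falsy and that ResourceHandle.interrupt() fires the callbacks
# registered in on_interrupted; those helpers are not shown here, so this is
# illustrative rather than a definitive example of the project's API.
def _resource_usage_sketch():
    res = Resource('camera')

    def lost_camera():
        print('camera was taken away by a stronger requester')

    # Priority 5 acquires the free resource.
    weak = res.request(with_priority=5, on_taken_away=lost_camera)

    # Priority 1 (numerically lower, therefore stronger) preempts the holder:
    # the first handle is interrupted, which is assumed to invoke lost_camera().
    strong = res.request(with_priority=1)

    # Releasing with a handle that is no longer the owner is logged and ignored.
    res.release(weak)
    res.release(strong)   # actually frees the resource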
class Mutex(object): """Models a mutex object which provides a lock over an object, identified by name. :type name: str :param name: the name of the lock to be created (must be unique) """ _current_mutex = {} def __init__(self, name): self._lock = Lock() self.name = name @classmethod def get_mutex(cls, name="_default"): """Class method to create unique mutex, with name or using default name "_default". """ if name not in Mutex._current_mutex: Mutex._current_mutex[name] = Mutex(name) return Mutex._current_mutex[name] def __del__(self): Lock.__del__(self) if self.name in Mutex._current_mutex: del Mutex._current_mutex[self.name] def __enter__(self): return self._lock.__enter__() def __exit__(self, typ, value, traceback): return self._lock.__exit__()
from threading import Lock


class PicklableLock(object):
    """
    A wrapper for threading.Lock which discards its state during pickling and
    is reinitialized unlocked when unpickled.
    """

    def __init__(self):
        self.lock = Lock()

    def __getstate__(self):
        # The lock itself cannot be pickled; serialize an empty placeholder.
        return ''

    def __setstate__(self, value):
        # Recreate a fresh, unlocked lock on unpickling.
        self.__init__()

    def __enter__(self):
        return self.lock.__enter__()

    def __exit__(self, exc_type, exc_val, exc_tb):
        return self.lock.__exit__(exc_type, exc_val, exc_tb)
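# Sketch of the pickling behaviour described in the docstring above: the
# pickled payload carries no lock state, so the deserialized copy comes back
# as a fresh, unlocked lock that is independent of the original.
import pickle

def _picklable_lock_sketch():
    original = PicklableLock()
    with original:                      # hold the original lock
        copy = pickle.loads(pickle.dumps(original))
        with copy:                      # no deadlock: the copy wraps a new, unlocked Lock
            print('copy acquired while the original is still held')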
from threading import Lock


class PickleableLock(object):
    """Lock wrapper that pickles to an empty state and unpickles as a fresh lock,
    delegating unknown attributes (e.g. ``acquire``/``release``) to the wrapped lock."""

    def __init__(self):
        self.lock = Lock()

    def __getstate__(self):
        return ''

    def __setstate__(self, value):
        self.__init__()

    def __getattr__(self, item):
        # Delegate attribute access to the wrapped lock via getattr;
        # _thread.lock objects do not define __getattr__ themselves.
        return getattr(self.lock, item)

    def __enter__(self):
        return self.lock.__enter__()

    def __exit__(self, exc_type, exc_val, exc_tb):
        return self.lock.__exit__(exc_type, exc_val, exc_tb)
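# Brief sketch of the attribute delegation above: acquire(), release() and
# locked() are not defined on the wrapper, so __getattr__ forwards them to
# the wrapped threading.Lock.
def _pickleable_lock_sketch():
    lock = PickleableLock()
    lock.acquire()            # forwarded to lock.lock.acquire()
    print(lock.locked())      # True, also forwarded
    lock.release()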
class _LoggingMutexThreading(_LoggingMutex):
    def __init__(self, name):
        super(_LoggingMutexThreading, self).__init__(name)
        from threading import Lock
        self.mutex = Lock()

    def currentThreadID(self):
        from threading import currentThread
        return currentThread().ident

    def _acquire(self):
        self.mutex.acquire()

    def _release(self):
        self.mutex.release()

    def enterMutex(self):
        self.mutex.__enter__()

    def exitMutex(self, *args, **kwargs):
        self.mutex.__exit__(*args, **kwargs)
from threading import Lock


class LockedList(list):
    """
    A list that supports the ``with`` keyword with a built-in lock.

    Though Python lists are thread-safe in that they will not raise exceptions
    during concurrent access, they do not guarantee atomicity. This class lets
    you gain atomicity when needed.
    """

    def __init__(self, *args, **kwargs):
        super(LockedList, self).__init__(*args, **kwargs)
        self.lock = Lock()

    def __enter__(self):
        return self.lock.__enter__()

    def __exit__(self, the_type, value, traceback):
        return self.lock.__exit__(the_type, value, traceback)
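# Sketch of the kind of compound operation LockedList is meant to protect:
# a membership check followed by an append is not atomic on a plain list,
# but holding the built-in lock around both steps makes the pair atomic with
# respect to other threads that also use the lock.
def _locked_list_sketch():
    items = LockedList([1, 2, 3])

    def add_once(value):
        with items:                  # acquires items.lock
            if value not in items:   # check ...
                items.append(value)  # ... and modify under the same lock

    add_once(4)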
from threading import Lock


class Mutex(object):
    _current_mutex = {}

    def __init__(self, name):
        self._lock = Lock()
        self.name = name

    @classmethod
    def get_mutex(cls, name="_default"):
        if name not in Mutex._current_mutex:
            Mutex._current_mutex[name] = Mutex(name)
        return Mutex._current_mutex[name]

    def __del__(self):
        # Remove this mutex from the registry; the lock itself needs no
        # explicit finalization.
        if self.name in Mutex._current_mutex:
            del Mutex._current_mutex[self.name]

    def __enter__(self):
        return self._lock.__enter__()

    def __exit__(self, typ, value, traceback):
        return self._lock.__exit__(typ, value, traceback)
from __future__ import annotations

import uuid
from collections.abc import Hashable
from threading import Lock
from typing import ClassVar
from weakref import WeakValueDictionary


class SerializableLock:
    """A Serializable per-process Lock

    This wraps a normal ``threading.Lock`` object and satisfies the same
    interface. However, this lock can also be serialized and sent to
    different processes. It will not block concurrent operations between
    processes (for this you should look at ``multiprocessing.Lock`` or
    ``locket.lock_file``) but will consistently deserialize into the same
    lock.

    So if we make a lock in one process::

        lock = SerializableLock()

    And then send it over to another process multiple times::

        bytes = pickle.dumps(lock)
        a = pickle.loads(bytes)
        b = pickle.loads(bytes)

    Then the deserialized objects will operate as though they were the same
    lock, and collide as appropriate.

    This is useful for consistently protecting resources on a per-process
    level.

    The creation of locks is itself not threadsafe.
    """

    _locks: ClassVar[WeakValueDictionary[Hashable, Lock]] = WeakValueDictionary()
    token: Hashable
    lock: Lock

    def __init__(self, token: Hashable | None = None):
        self.token = token or str(uuid.uuid4())
        if self.token in SerializableLock._locks:
            self.lock = SerializableLock._locks[self.token]
        else:
            self.lock = Lock()
            SerializableLock._locks[self.token] = self.lock

    def acquire(self, *args, **kwargs):
        return self.lock.acquire(*args, **kwargs)

    def release(self, *args, **kwargs):
        return self.lock.release(*args, **kwargs)

    def __enter__(self):
        self.lock.__enter__()

    def __exit__(self, *args):
        self.lock.__exit__(*args)

    def locked(self):
        return self.lock.locked()

    def __getstate__(self):
        return self.token

    def __setstate__(self, token):
        self.__init__(token)

    def __str__(self):
        return f"<{self.__class__.__name__}: {self.token}>"

    __repr__ = __str__
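# Sketch of the behaviour the docstring describes, kept within one process:
# copies deserialized from the same pickled bytes share a token, so they
# resolve to the very same underlying threading.Lock and therefore collide.
import pickle

def _serializable_lock_sketch():
    lock = SerializableLock()
    payload = pickle.dumps(lock)

    a = pickle.loads(payload)
    b = pickle.loads(payload)

    assert a.token == b.token == lock.token
    assert a.lock is b.lock is lock.lock   # same per-process Lock object

    a.acquire()
    print(b.acquire(False))   # False: b is backed by the same, already-held lock
    a.release()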
import uuid
from threading import Lock
from weakref import WeakValueDictionary


class SerializableLock(object):
    """
    A Serializable per-process Lock

    This wraps a normal ``threading.Lock`` object and satisfies the same
    interface. However, this lock can also be serialized and sent to
    different processes. It will not block concurrent operations between
    processes (for this you should look at ``multiprocessing.Lock`` or
    ``locket.lock_file``) but will consistently deserialize into the same
    lock.

    So if we make a lock in one process::

        lock = SerializableLock()

    And then send it over to another process multiple times::

        bytes = pickle.dumps(lock)
        a = pickle.loads(bytes)
        b = pickle.loads(bytes)

    Then the deserialized objects will operate as though they were the same
    lock, and collide as appropriate.

    This is useful for consistently protecting resources on a per-process
    level.

    The creation of locks is itself not threadsafe.

    This class was taken from dask.utils.py

    Copyright (c) 2014-2018, Anaconda, Inc. and contributors
    All rights reserved.

    Redistribution and use in source and binary forms, with or without
    modification, are permitted provided that the following conditions are
    met:

    Redistributions of source code must retain the above copyright notice,
    this list of conditions and the following disclaimer.

    Redistributions in binary form must reproduce the above copyright notice,
    this list of conditions and the following disclaimer in the documentation
    and/or other materials provided with the distribution.

    Neither the name of Anaconda nor the names of any contributors may be
    used to endorse or promote products derived from this software without
    specific prior written permission.

    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
    IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
    THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
    PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
    CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
    EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
    PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
    PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
    LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
    NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
    SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
    """

    _locks = WeakValueDictionary()

    def __init__(self, token=None):
        self.token = token or str(uuid.uuid4())
        if self.token in SerializableLock._locks:
            self.lock = SerializableLock._locks[self.token]
        else:
            self.lock = Lock()
            SerializableLock._locks[self.token] = self.lock

    def acquire(self, *args, **kwargs):
        return self.lock.acquire(*args, **kwargs)

    def release(self, *args, **kwargs):
        return self.lock.release(*args, **kwargs)

    def __enter__(self):
        self.lock.__enter__()

    def __exit__(self, *args):
        self.lock.__exit__(*args)

    def locked(self):
        return self.lock.locked()

    def __getstate__(self):
        return self.token

    def __setstate__(self, token):
        self.__init__(token)

    def __str__(self):
        return "<%s: %s>" % (self.__class__.__name__, self.token)

    __repr__ = __str__
import uuid
from threading import Lock
from weakref import WeakValueDictionary


class SerializableLock(object):
    """
    A Serializable per-process Lock

    This wraps a normal ``threading.Lock`` object and satisfies the same
    interface. However, this lock can also be serialized and sent to
    different processes. It will not block concurrent operations between
    processes (for this you should look at ``multiprocessing.Lock`` or
    ``locket.lock_file``) but will consistently deserialize into the same
    lock.

    So if we make a lock in one process::

        lock = SerializableLock()

    And then send it over to another process multiple times::

        bytes = pickle.dumps(lock)
        a = pickle.loads(bytes)
        b = pickle.loads(bytes)

    Then the deserialized objects will operate as though they were the same
    lock, and collide as appropriate.

    This is useful for consistently protecting resources on a per-process
    level.

    The creation of locks is itself not threadsafe.
    """

    _locks = WeakValueDictionary()

    def __init__(self, token=None):
        self.token = token or str(uuid.uuid4())
        if self.token in SerializableLock._locks:
            self.lock = SerializableLock._locks[self.token]
        else:
            self.lock = Lock()
            SerializableLock._locks[self.token] = self.lock

    def acquire(self, *args):
        return self.lock.acquire(*args)

    def release(self, *args):
        return self.lock.release(*args)

    def __enter__(self):
        self.lock.__enter__()

    def __exit__(self, *args):
        self.lock.__exit__(*args)

    @property
    def locked(self):
        # Report whether the wrapped lock is currently held.
        return self.lock.locked()

    def __getstate__(self):
        return self.token

    def __setstate__(self, token):
        self.__init__(token)

    def __str__(self):
        return "<%s: %s>" % (self.__class__.__name__, self.token)

    __repr__ = __str__