def test_compilelock_errors():
    """Non-positive timeout values must be rejected by ``lock_ctx``."""
    with tempfile.TemporaryDirectory() as dir:
        # Zero and negative timeouts are both invalid; check them in order.
        for bad_timeout in (0, -2):
            with pytest.raises(ValueError):
                with lock_ctx(dir, timeout=bad_timeout):
                    pass
def test_fn_2(arg):
    """Thread target: attempt to acquire the lock with a short timeout.

    Appends ``True`` to *arg* when the attempt times out, i.e. the lock
    is confirmed to be held by another thread.
    """
    try:
        with lock_ctx(dir_name, timeout=0.1):
            # If this can get the lock, then our file lock has failed
            raise AssertionError()
    except filelock.Timeout:
        # It timed out, which means that the lock was still held by the
        # first thread
        arg.append(True)
    except Exception:
        # Consistent with the sibling helper elsewhere in this file:
        # surface unexpected errors instead of letting them die silently
        # inside the thread.
        traceback.print_exc()
def run_locking_test(ctx):
    """Check lock visibility from a subprocess through acquire/reenter/release."""
    with tempfile.TemporaryDirectory() as dir_name:

        def observed_state():
            # What a separate process sees for this directory's lock.
            return get_subprocess_lock_state(ctx, dir_name)

        assert observed_state() == "unlocked"

        # create a lock on the test directory
        with lock_ctx(dir_name):
            dir_key = f"{dir_name}-{os.getpid()}"
            assert dir_key in local_mem._locks
            assert local_mem._locks[dir_key]
            assert observed_state() == "locked"

            # Re-entering from the owning process must succeed while the
            # subprocess still sees the directory as locked.
            with lock_ctx(dir_name, timeout=0.1):
                assert observed_state() == "locked"

            assert observed_state() == "locked"

        assert observed_state() == "unlocked"
def test_fn_2(arg):
    """Thread body: prove the lock is already held by timing out on it."""
    try:
        with lock_ctx(dir_name, timeout=0.1):
            # Acquiring here would mean the file lock is broken.
            raise AssertionError()
    except filelock.Timeout:
        # Expected outcome: the first thread still owns the lock, so
        # report success to the caller.
        arg.append(True)
    except Exception:
        # Anything else is unexpected; print it so the failure is visible.
        traceback.print_exc()
def test_fn_1(arg):
    """Thread body: take the lock and hold it until signalled via *arg*."""
    try:
        with lock_ctx(dir_name):
            # Notify the outside that we've obtained the lock
            arg.append(False)
            # Hold the lock until another thread appends True.
            while True not in arg:
                time.sleep(0.5)
    except Exception:
        # Notify the outside that we are done
        arg.append(False)
        # If something unexpected happened, we want to know what it was
        traceback.print_exc()
def test_compilelock_force_unlock():
    """``force_unlock`` must drop the in-process lock record, even when
    invoked from a separate process."""
    with tempfile.TemporaryDirectory() as dir_name:
        with lock_ctx(dir_name):
            dir_key = f"{dir_name}-{os.getpid()}"
            assert dir_key in local_mem._locks
            assert local_mem._locks[dir_key]

            # Forcing from within this process clears the record.
            force_unlock(dir_name)
            assert dir_key not in local_mem._locks

            # A sub-process forcing unlock...
            ctx = multiprocessing.get_context("spawn")
            proc = ctx.Process(target=force_unlock, args=(dir_name,))
            proc.start()
            proc.join()
            assert dir_key not in local_mem._locks
del sys.path[0] try: try_import() need_reload = True if version != getattr(scan_perform, "_version", None): raise ImportError("Scan code version mismatch") except ImportError: dirname = "scan_perform" loc = os.path.join(config.compiledir, dirname) os.makedirs(loc, exist_ok=True) with lock_ctx(loc): # Maybe someone else already finished compiling it while we were # waiting for the lock? try: if need_reload: # The module was successfully imported earlier: we need to # reload it to check if the version was updated. try_reload() else: try_import() need_reload = True if version != getattr(scan_perform, "_version", None): raise ImportError() except ImportError:
# will be imported and compile_str won't get called at all. sys.path.insert(0, config.compiledir) location = os.path.join(config.compiledir, "cutils_ext") if not os.path.exists(location): try: os.mkdir(location) except OSError as e: assert e.errno == errno.EEXIST assert os.path.exists(location), location if not os.path.exists(os.path.join(location, "__init__.py")): open(os.path.join(location, "__init__.py"), "w").close() try: from cutils_ext.cutils_ext import * # noqa except ImportError: with lock_ctx(): # Ensure no-one else is currently modifying the content of the compilation # directory. This is important to prevent multiple processes from trying to # compile the cutils_ext module simultaneously. try: # We must retry to import it as some other process could # have been compiling it between the first failed import # and when we receive the lock from cutils_ext.cutils_ext import * # noqa except ImportError: compile_cutils() from cutils_ext.cutils_ext import * # noqa finally: if sys.path[0] == config.compiledir: del sys.path[0]
def check_is_locked(dir_name, q):
    """Probe *dir_name*'s lock and report the result on queue *q*.

    Puts ``"unlocked"`` when the lock could be acquired within the short
    timeout, ``"locked"`` when the attempt timed out.
    """
    state = "locked"
    try:
        with lock_ctx(dir_name, timeout=0.1):
            state = "unlocked"
            # Report while still holding the lock, mirroring acquisition.
            q.put(state)
    except filelock.Timeout:
        q.put(state)
def test_fn_1():
    """Thread body: acquire the lock and hold it for a long time."""
    with lock_ctx(dir_name):
        # Sleep "indefinitely" so other threads reliably observe the
        # lock as held.
        time.sleep(100)