def process(self, in_file, **kwargs):
    """Run the configured external command on ``in_file``.

    ``cmd_template`` is formatted with the resolved input/output paths
    (plus any extra ``kwargs``) and split on whitespace to build the
    argv list.  If ``stdout_handler``/``stderr_handler`` are callables,
    the child's corresponding stream is piped and fed to the handler
    line by line via a background ``AsynchronousFileReader`` thread.

    :param in_file: input file handle/descriptor understood by
        ``get_output_file`` and ``get_input_path``.
    :param kwargs: extra fields forwarded to the template and helpers.
    :returns: the output file object produced by ``get_output_file``.
    :raises ProcessingError: re-raised from a handler, or raised when
        the child ends with a negative return code.
    """
    out_file = self.get_output_file(in_file, **kwargs)
    # NOTE(review): str.split() breaks the command on whitespace, so
    # paths containing spaces would be mangled -- assumed not to occur
    # here; confirm.
    cmd = self.cmd_template.format(input=self.get_input_path(in_file),
                                   output=self.get_output_path(out_file),
                                   **kwargs).split()
    stdout_pipe, stdout_queue, stdout_reader = None, None, None
    stderr_pipe, stderr_queue, stderr_reader = None, None, None
    # Only create a pipe + queue for streams that actually have a
    # handler; unhandled streams are inherited from the parent.
    if callable(self.stdout_handler):
        stdout_pipe = subprocess.PIPE
        stdout_queue = queue.Queue()
    if self.stderr_handler is True:
        # True means "merge stderr into stdout".
        stderr_pipe = subprocess.STDOUT
    elif callable(self.stderr_handler):
        stderr_pipe = subprocess.PIPE
        stderr_queue = queue.Queue()
    proc = subprocess.Popen(cmd, stdout=stdout_pipe, stderr=stderr_pipe,
                            universal_newlines=True)
    if stdout_queue is not None:
        stdout_reader = AsynchronousFileReader(proc.stdout, stdout_queue)
        stdout_reader.start()
    if stderr_queue is not None:
        stderr_reader = AsynchronousFileReader(proc.stderr, stderr_queue)
        stderr_reader.start()
    if stdout_reader is None and stderr_reader is None:
        # No handlers at all: just block until the child exits.
        proc.wait()
    else:
        # A handler may return a tuple, which is passed back to it as
        # extra positional arguments on the next line (an accumulator);
        # a falsy return resets the accumulator to ().
        stdout_args, stderr_args = (), ()
        try:
            # Poll both queues until every active reader reports EOF.
            while not (stdout_reader is None or stdout_reader.eof()) or \
                    not (stderr_reader is None or stderr_reader.eof()):
                if stdout_queue is not None:
                    while not stdout_queue.empty():
                        stdout_args = self.stdout_handler(  # pylint: disable=not-callable
                            stdout_queue.get(), *stdout_args) or ()
                if stderr_queue is not None:
                    while not stderr_queue.empty():
                        stderr_args = self.stderr_handler(  # pylint: disable=not-callable
                            stderr_queue.get(), *stderr_args) or ()
                time.sleep(self.sleep_time)
        except ProcessingError:
            # A handler rejected the output: stop the child before
            # propagating so we do not leak a running process.
            if proc.poll() is None:
                proc.terminate()
            raise
        finally:
            # wait for process to finish, so we can check the return value
            if proc.poll() is None:
                proc.wait()
            if stdout_reader is not None:
                stdout_reader.join()
                proc.stdout.close()
            if stderr_reader is not None:
                stderr_reader.join()
                proc.stderr.close()
    # NOTE(review): only negative return codes (child killed by a
    # signal) are treated as failure; a positive nonzero exit status
    # passes silently -- confirm this is intentional.
    if proc.returncode < 0:
        raise ProcessingError("There was a problem processing this file.")
    return out_file
def test_thread_safety(self):
    """The view must be callable concurrently without cross-thread leaks.

    Two requests with different GET parameters are dispatched from two
    threads; each thread reports its own request's ``name`` back through
    a shared queue, and the two reported values must differ.
    """
    exceptions = []

    def threaded_view(resp_queue, view, request):
        # Pause so both threads are running before either calls the view.
        import time
        time.sleep(2)
        try:
            view(request)
            resp_queue.put(request.GET['name'])
        except Exception as e:
            # Record the failure for the main thread, then re-raise so
            # the worker thread also dies loudly.
            exceptions.append(e)
            raise

    class ThreadedSearchView(SearchView):
        def __call__(self, request):
            print("Name: %s" % request.GET['name'])
            return super(ThreadedSearchView, self).__call__(request)

    view = search_view_factory(view_class=ThreadedSearchView)
    resp_queue = queue.Queue()

    request_1 = HttpRequest()
    request_1.GET = {'name': 'foo'}
    request_2 = HttpRequest()
    request_2.GET = {'name': 'bar'}

    th1 = Thread(target=threaded_view, args=(resp_queue, view, request_1))
    th2 = Thread(target=threaded_view, args=(resp_queue, view, request_2))
    th1.start()
    th2.start()
    th1.join()
    th2.join()

    # Fail loudly if either worker raised; otherwise a worker failure
    # would leave the queue short and hang the get() calls below.
    self.assertEqual(exceptions, [])

    foo = resp_queue.get()
    bar = resp_queue.get()
    self.assertNotEqual(foo, bar)
def test_read_and_write_with_threads(self):
    """A write lock must block concurrent readers until it is released."""
    signals = queue.Queue()

    def acquire_read_lock():
        with TableLock(read=[Alphabet]):
            signals.put("Reading")

    try:
        # With no other lock held, the reader thread proceeds at once.
        worker = Thread(target=acquire_read_lock)
        worker.start()
        assert signals.get() == "Reading"

        with TableLock(write=[Alphabet]):
            worker = Thread(target=acquire_read_lock)
            worker.start()
            # The write lock keeps the reader waiting...
            with pytest.raises(queue.Empty):
                signals.get(timeout=0.2)
        # ...and releasing it lets the reader through.
        assert signals.get() == "Reading"
    finally:
        worker.join()
def test_threads_concurrent_access(self):
    """
    Sharing one ``Lock`` object across multiple threads works, which
    permits declaring a lock once, upfront, at module level.
    """
    main_chan = queue.Queue()
    worker_chan = queue.Queue()
    the_lock = Lock('THElock', 0.05)

    def grab_and_hold():
        assert not the_lock.is_held()
        with the_lock:
            main_chan.put("Locked")
            worker_chan.get(True)

    helper = Thread(target=grab_and_hold)
    helper.start()
    try:
        assert main_chan.get(True) == "Locked"

        cursor = connection.cursor()
        cursor.execute("SELECT CONNECTION_ID()")
        own_connection_id = cursor.fetchone()[0]

        # The worker holds the lock on a different DB connection.
        assert the_lock.is_held()
        assert the_lock.holding_connection_id() != own_connection_id

        # Taking it from this thread must therefore time out.
        with pytest.raises(TimeoutError):
            with the_lock:
                pass

        worker_chan.put("Stop")
    finally:
        helper.join()

    # Once the worker has released it, the lock is free again.
    with the_lock:
        pass
def test_timeout_with_threads(self):
    """Acquiring a lock held by another thread's connection times out."""
    to_main = queue.Queue()
    to_worker = queue.Queue()

    def lock_until_told():
        with Lock('threading_test'):
            to_main.put("Locked")
            to_worker.get(True)

    threading_test = Lock('threading_test', 0.05)
    assert not threading_test.is_held()

    holder = Thread(target=lock_until_told)
    holder.start()
    try:
        assert to_main.get(True) == "Locked"

        cursor = connection.cursor()
        cursor.execute("SELECT CONNECTION_ID();")
        own_connection_id = cursor.fetchone()[0]

        # Held -- but by the worker's connection, not ours.
        assert threading_test.is_held()
        assert threading_test.holding_connection_id() != own_connection_id

        # Our short timeout fires while the worker keeps the lock.
        with pytest.raises(TimeoutError):
            with threading_test:
                pass

        to_worker.put("Stop")
    finally:
        holder.join()

    assert not threading_test.is_held()
    with threading_test:
        pass
def test_thread_safety(self):
    # This is a regression. ``SearchIndex`` used to write to
    # ``self.prepared_data``, which would leak between threads if things
    # went too fast.
    exceptions = []

    def threaded_prepare(index_queue, index, model):
        try:
            index.queue = index_queue
            index.prepare(model)
        except Exception as e:
            # Record the failure so the main thread can report it, then
            # re-raise so the worker also dies loudly.
            exceptions.append(e)
            raise

    class ThreadedSearchIndex(GoodMockSearchIndex):
        def prepare_author(self, obj):
            # Stagger the two threads so their prepare() calls overlap:
            # pk 20 finishes while pk 21 is still preparing.
            if obj.pk == 20:
                time.sleep(0.1)
            else:
                time.sleep(0.5)

            index_queue.put(self.prepared_data['author'])
            return self.prepared_data['author']

    tmi = ThreadedSearchIndex()
    index_queue = queue.Queue()

    mock_1 = MockModel()
    mock_1.pk = 20
    mock_1.author = 'foo'
    mock_1.pub_date = datetime.datetime(2009, 1, 31, 4, 19, 0)
    mock_2 = MockModel()
    mock_2.pk = 21
    mock_2.author = 'daniel%s' % mock_2.id
    mock_2.pub_date = datetime.datetime(2009, 1, 31, 4, 19, 0)

    th1 = Thread(target=threaded_prepare, args=(index_queue, tmi, mock_1))
    th2 = Thread(target=threaded_prepare, args=(index_queue, tmi, mock_2))
    th1.start()
    th2.start()
    th1.join()
    th2.join()

    # Surface any exception raised inside the workers; otherwise a
    # failed worker would leave the queue short and hang get() below.
    self.assertEqual(exceptions, [])

    mock_1_result = index_queue.get()
    mock_2_result = index_queue.get()

    self.assertEqual(mock_1_result, 'foo')
    self.assertEqual(mock_2_result, 'daniel21')
import os import sys import time import signal import threading import atexit from django.utils.six.moves import queue _interval = 1.0 _times = {} _files = [] _running = False _queue = queue.Queue() _lock = threading.Lock() def _restart(path): _queue.put(True) prefix = 'monitor (pid=%d):' % os.getpid() print('%s Change detected to \'%s\'.' % (prefix, path), file=sys.stderr) print('%s Triggering process restart.' % prefix, file=sys.stderr) os.kill(os.getpid(), signal.SIGINT) def _modified(path): try: # If path doesn't denote a file and were previously # tracking it, then it has been removed or the file type # has changed so force a restart. If not previously # tracking the file then we can ignore it as probably