def benchmark_file_write(self):
    """Writing <BENCHMARK_COUNT> items."""
    self.path = tempfile.mkdtemp('b_file_10000')
    q = Queue(self.path)
    for i in range(BENCHMARK_COUNT):
        q.put('bench%d' % i)
    assert q.qsize() == BENCHMARK_COUNT

def test_task_done_too_many_times(self, serializer):
    """Test that calling task_done too many times raises ValueError."""
    q = Queue(self.path, **serializer_params[serializer])
    q.put('var1')
    q.get()
    q.task_done()
    with self.assertRaises(ValueError):
        q.task_done()

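# NOTE: the parameterized tests in this extract rely on a `serializer_params`
# mapping that is not shown here. A minimal sketch of what such a mapping
# could look like, assuming persistqueue's bundled json/msgpack/pickle
# serializer modules (msgpack needs the optional msgpack dependency); the
# keys are illustrative, not the project's actual definition:
from persistqueue.serializers import json as serializers_json
from persistqueue.serializers import msgpack as serializers_msgpack
from persistqueue.serializers import pickle as serializers_pickle

serializer_params = {
    'serializer=default': {},  # falls back to the default pickle serializer
    'serializer=json': {'serializer': serializers_json},
    'serializer=msgpack': {'serializer': serializers_msgpack},
    'serializer=pickle': {'serializer': serializers_pickle},
}
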
def test_task_done_too_many_times(self):
    """Test that calling task_done too many times raises ValueError."""
    q = Queue(self.path)
    q.put(b'var1')
    q.get()
    q.task_done()
    with self.assertRaises(ValueError):
        q.task_done()

def test_open_close_single(self):
    """Write 1 item, close, reopen checking if same item is there"""
    q = Queue(self.path)
    q.put(b'var1')
    del q
    q = Queue(self.path)
    self.assertEqual(1, q.qsize())
    self.assertEqual(b'var1', q.get())
    q.task_done()

def test_open_close_single(self, serializer):
    """Write 1 item, close, reopen checking if same item is there"""
    q = Queue(self.path, **serializer_params[serializer])
    q.put('var1')
    del q
    q = Queue(self.path, **serializer_params[serializer])
    self.assertEqual(1, q.qsize())
    self.assertEqual('var1', q.get())
    q.task_done()

def test_empty(self):
    q = Queue(self.path)
    self.assertEqual(q.empty(), True)
    q.put('var1')
    self.assertEqual(q.empty(), False)
    q.get()
    self.assertEqual(q.empty(), True)

def test_get_timeout(self):
    """Test that get raises Empty when the timeout expires."""
    q = Queue(self.path)
    q.put(b'var1')
    q.get()
    with self.assertRaises(Empty):
        q.get(timeout=1)

def test_get_timeout(self, serializer):
    """Test that get raises Empty when the timeout expires."""
    q = Queue(self.path, **serializer_params[serializer])
    q.put('var1')
    q.get()
    with self.assertRaises(Empty):
        q.get(timeout=1)

def test_protocol(self):
    # test that protocol is set properly
    expect_protocol = 2 if sys.version_info[0] == 2 else 4
    self.assertEqual(
        persistqueue.serializers.pickle.protocol,
        expect_protocol,
    )
    # test that protocol is used properly
    serializer = namedtuple("Serializer", ["dump", "load"])(
        persistqueue.serializers.pickle.dump, lambda fp: fp.read())
    q = Queue(path=self.path, serializer=serializer)
    q.put(b'a')
    self.assertEqual(q.get(), pickle.dumps(b'a', protocol=expect_protocol))

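# The test above passes a bare dump/load pair, which shows the serializer
# contract Queue expects: dump(value, fp) writes one item to an open binary
# file and load(fp) reads one item back. As an illustration (not part of the
# project), a JSON-lines serializer could be built the same way; the names
# below are made up for this sketch.
import json
from collections import namedtuple


def _dump(value, fp, sort_keys=False):
    # mirror the optional sort_keys kwarg of the bundled serializers
    fp.write(json.dumps(value, sort_keys=sort_keys).encode('utf-8') + b'\n')


def _load(fp):
    return json.loads(fp.readline().decode('utf-8'))


json_lines_serializer = namedtuple('Serializer', ['dump', 'load'])(_dump, _load)
# usage sketch: q = Queue(path='/tmp/pq-custom-serializer', serializer=json_lines_serializer)
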
class FileQueue(QueueInterface):
    def __init__(self, path: str):
        self.queue = Queue(path, autosave=True)

    def is_empty(self) -> bool:
        return self.queue.empty()

    def get_items(self) -> Generator[str, None, None]:
        while not self.is_empty():
            yield self.queue.get()
        return

    def enqueue(self, value: str):
        self.queue.put(value)

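# A brief, hypothetical usage sketch for the FileQueue wrapper above; the
# path and values are illustrative only. Because the wrapped Queue is created
# with autosave=True, every get() is persisted immediately and no explicit
# task_done() is needed.
fq = FileQueue('/tmp/filequeue-demo')
fq.enqueue('first')
fq.enqueue('second')
for item in fq.get_items():
    print(item)  # drains the queue in FIFO order
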
def test_autosave_get(self, serializer):
    """test the autosave feature saves on get()"""
    q = Queue(self.path, autosave=True, **serializer_params[serializer])
    q.put('var1')
    q.put('var2')
    self.assertEqual('var1', q.get())
    del q
    # queue should save on get(), only one item should remain
    q = Queue(self.path, autosave=True, **serializer_params[serializer])
    self.assertEqual(1, q.qsize())
    self.assertEqual('var2', q.get())
    del q

def benchmark_file_read_write_true(self):
    """Writing and reading <BENCHMARK_COUNT> items(many task_done)."""
    self.path = tempfile.mkdtemp('b_file_10000')
    q = Queue(self.path)
    for i in range(BENCHMARK_COUNT):
        q.put('bench%d' % i)
    for i in range(BENCHMARK_COUNT):
        q.get()
        q.task_done()
    assert q.qsize() == 0

def test_random_read_write(self):
    """Test random read/write"""
    q = Queue(self.path)
    n = 0
    for i in range(1000):
        if random.random() < 0.5:
            if n > 0:
                q.get_nowait()
                q.task_done()
                n -= 1
            else:
                with self.assertRaises(Empty):
                    q.get_nowait()
        else:
            q.put('var%d' % random.getrandbits(16))
            n += 1

def benchmark_file_wr_10000(self):
    """Benchmark file queue by writing and reading <BENCHMARK_COUNT> items."""
    self.path = tempfile.mkdtemp('b_file_10000')
    q = Queue(self.path)
    for i in range(BENCHMARK_COUNT):
        q.put('bench%d' % i)
    q.task_done()
    for i in range(BENCHMARK_COUNT):
        q.get()

def open(self, **kwargs) -> "PersistQueueConnection":
    if self.queue_engine == "sqlite":
        queue = SQLiteQueue(self.queue_path, auto_commit=False)
    elif self.queue_engine == "file":
        queue = Queue(self.queue_path)
    else:
        raise ValueError("bad queue engine value")
    self._queue = queue
    return self

def test_put_block_and_wait(self):
    """Test block until queue is not full."""
    q = Queue(self.path, maxsize=10)

    def consumer():
        for i in range(5):
            q.get()
            q.task_done()

    def producer():
        for j in range(16):
            q.put('var%d' % j)

    p = Thread(target=producer)
    p.start()
    c = Thread(target=consumer)
    c.start()
    c.join()
    val = q.get_nowait()
    p.join()
    self.assertEqual('var5', val)

def benchmark_file_read_write_autosave(self):
    """Writing and reading <BENCHMARK_COUNT> items(autosave)."""
    self.path = tempfile.mkdtemp('b_file_10000')
    q = Queue(self.path, autosave=True)
    for i in range(BENCHMARK_COUNT):
        q.put('bench%d' % i)
    for i in range(BENCHMARK_COUNT):
        q.get()
    assert q.qsize() == 0

def test_put_timeout_reached(self):
    """Test put with block and timeout."""
    q = Queue(self.path, maxsize=2)
    for x in range(2):
        q.put(x)
    with self.assertRaises(Full):
        q.put(b'full_and_timeout', block=True, timeout=1)

def test_put_maxsize_reached(self):
    """Test that put raises Full when maxsize is reached."""
    q = Queue(self.path, maxsize=10)
    for x in range(10):
        q.put(x)
    with self.assertRaises(Full):
        q.put(b'full_now', block=False)

def test_put_maxsize_reached(self, serializer):
    """Test that put raises Full when maxsize is reached."""
    q = Queue(self.path, maxsize=10, **serializer_params[serializer])
    for x in range(10):
        q.put(x)
    with self.assertRaises(Full):
        q.put('full_now', block=False)

def test_put_timeout_reached(self, serializer):
    """Test put with block and timeout."""
    q = Queue(self.path, maxsize=2, **serializer_params[serializer])
    for x in range(2):
        q.put(x)
    with self.assertRaises(Full):
        q.put('full_and_timeout', block=True, timeout=1)

def tqueue(project, queue_id, path):
    # to do: parametrize with proj and path
    p_handler = PROJ_Handler(project, path)
    print("Project handler initialized:", p_handler.start_dir)
    q = Queue(settings.QUEUE_PREFIX + project + queue_id)
    walk = os.walk(p_handler.start_dir)
    i = 0
    for root, dirs, files in walk:
        if len(files) > 0:
            print("=============================: ", root, dirs, files)
            add = p_handler.get_file_facets(root, dirs, files)
            add = p_handler.get_md(add, root, files)
            add = p_handler.get_posix(add, root, files)
            print(add)
            # update elastic search
            i_list = []
            for item, i_dict in add.items():
                i += 1
                print("index update: ", i)
                if len(i_dict) > 0:
                    q.put(i_dict)
            print("============================")

def test_autosave_join(self, serializer):
    """Enabling autosave should still allow task_done/join behavior"""
    q = Queue(self.path, autosave=True, **serializer_params[serializer])
    for i in range(10):
        q.put('var%d' % i)

    def consumer():
        for i in range(10):
            q.get()
            # this should still 'count down' properly and allow q.join()
            # to finish
            q.task_done()

    c = Thread(target=consumer)
    c.start()
    q.join()
    with self.assertRaises(Empty):
        q.get_nowait()

def test_windows_error(self):
    """Test the rename restrictions of Windows"""
    q = Queue(self.path)
    q.put(b'a')
    fake_error = OSError('Cannot create a file when '
                         'that file already exists')
    setattr(fake_error, 'winerror', 183)
    os_rename = os.rename
    i = []

    def fake_remove(src, dst):
        if not i:
            i.append(1)
            raise fake_error
        else:
            i.append(2)
            os_rename(src, dst)

    with mock.patch('os.rename', new=fake_remove):
        q.put(b'b')

    self.assertEqual(b'a', q.get())
    self.assertEqual(b'b', q.get())

def test_multi_threaded(self):
    """Create consumer and producer threads, check parallelism"""
    q = Queue(self.path)

    def producer():
        for i in range(1000):
            q.put('var%d' % i)

    def consumer():
        for i in range(1000):
            q.get()
            q.task_done()

    c = Thread(target=consumer)
    c.start()
    p = Thread(target=producer)
    p.start()
    c.join()
    p.join()
    q.join()
    with self.assertRaises(Empty):
        q.get_nowait()

# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from pathlib import Path

from persistqueue import Queue

import threading
import time

from PyQt5.QtWidgets import QApplication

from plus import config, util, roast, connection, sync, controller

queue_path = str(
    (Path(util.getDataDirectory()) / config.outbox_cache).resolve())

queue = Queue(queue_path)

# queue entries are dictionaries with entries
#   url  : the URL to send the request to
#   data : the data dictionary that will be sent in the body as JSON
#   verb : the HTTP verb to be used (POST or PUT)

worker_thread = None


class Concur(threading.Thread):
    def __init__(self):
        threading.Thread.__init__(self)
        self.daemon = True  # OK for main to exit even if instance is still running
        self.paused = False  # start out non-paused
        self.state = threading.Condition()

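# To make the entry format described in the comment block above concrete, a
# producer would enqueue a request roughly like this (URL and payload are
# invented for illustration, not taken from the application):
queue.put({
    'url': 'https://example.org/api/roasts',    # endpoint the request is sent to
    'data': {'id': 42, 'label': 'demo roast'},  # dictionary sent as the JSON body
    'verb': 'POST',                             # HTTP verb: POST or PUT
})
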
sys.path.append('./video-splitter')

from collections import Counter

import tweepy
import re
import time
import os
from persistqueue import Queue
import threading
import random

import settings
import anim
from comment_list_brige import Comment

splitter = __import__("ffmpeg-split")

mention_queue = Queue('queue')


def sanitize_tweet(tweet):
    tweet.full_text = re.sub(r'^(@\S+ )+', '', tweet.full_text)
    tweet.full_text = re.sub(r'(https)\S*', '(link)', tweet.full_text)


def update_id(id):
    with open('id.txt', 'w') as idFile:
        idFile.write(id)


def postVideoTweet(reply_id, reply_name, filename):
    uploaded_media = api.media_upload(filename, media_category='TWEET_VIDEO')
    while (uploaded_media.processing_info['state'] == 'pending'):

def test_partial_write(self):
    """Test recovery from previous crash w/ partial write"""
    q = Queue(self.path)
    for i in range(100):
        q.put('var%d' % i)
    del q
    with open(os.path.join(self.path, 'q00000'), 'ab') as f:
        pickle.dump('文字化け', f)
    q = Queue(self.path)
    self.assertEqual(100, q.qsize())
    for i in range(100):
        self.assertEqual('var%d' % i, q.get())
        q.task_done()
    with self.assertRaises(Empty):
        q.get_nowait()

def test_open_close_1000(self):
    """Write 1000 items, close, reopen checking if all items are there"""
    q = Queue(self.path)
    for i in range(1000):
        q.put('var%d' % i)
    self.assertEqual(1000, q.qsize())
    del q
    q = Queue(self.path)
    self.assertEqual(1000, q.qsize())
    for i in range(1000):
        data = q.get()
        self.assertEqual('var%d' % i, data)
        q.task_done()
    with self.assertRaises(Empty):
        q.get_nowait()
    # assert adding another one still works
    q.put(b'foobar')
    data = q.get()

def test_clear_tail_file(self):
    """Test that tail files are only removed when task_done is called."""
    q = Queue(self.path, chunksize=10)
    for i in range(35):
        q.put('var%d' % i)

    for _ in range(15):
        q.get()

    q = Queue(self.path, chunksize=10)
    self.assertEqual(q.qsize(), 35)

    for _ in range(15):
        q.get()
    # the first tail file gets removed after task_done
    q.task_done()

    for _ in range(16):
        q.get()
    # the second and third files get removed after task_done
    q.task_done()

    self.assertEqual(q.qsize(), 4)