class FdfsUploader(object):
    """Feeds local file paths into a persistent de-duplicating queue for FastDFS upload."""

    def __init__(self):
        # FastDFS client configured from the tracker conf file.
        self.cli = client.Fdfs_client(client.get_tracker_conf(CONF_FILE))
        # Persistent queue of file paths awaiting upload; UniqueQ drops duplicates.
        self.queue = UniqueQ(QUEUE_NAME)

    def add_to_queue(self, local_file_full_path):
        # Enqueue the path; a duplicate path is silently ignored by UniqueQ.
        self.queue.put(local_file_full_path)
        # NOTE(review): modify_by_buffer() is called with no arguments, but the
        # fdfs_client API expects at least a file buffer and a remote file id —
        # this looks like a leftover/bug; confirm intent before relying on it.
        self.cli.modify_by_buffer()

    @staticmethod
    def start_thread_listen():
        # Spawn the background listener; listen_thread is defined elsewhere
        # in this module (not visible in this chunk).
        t1 = threading.Thread(target=listen_thread)
        t1.start()
def test_add_duplicate_item(self):
    """A duplicate put is ignored, and the de-dup state survives reopening the queue."""
    queue = UniqueQ(self.path)
    # Each (value, expected size) pair: the repeated 1111 must not grow the queue.
    for value, expected_size in ((1111, 1), (1111, 1), (2222, 2)):
        queue.put(value)
        self.assertEqual(expected_size, queue.size)
    # Drop the handle and reopen from disk: both unique items persist.
    del queue
    queue = UniqueQ(self.path)
    self.assertEqual(2, queue.size)
def test_multiple_consumers(self):
    """Test UniqueQ can be used by multiple consumers."""
    queue = UniqueQ(path=self.path, multithreading=True,
                    auto_commit=self.auto_commit)

    def producer():
        # 1000 distinct items, so every put survives de-duplication.
        for x in range(1000):
            queue.put('var%d' % x)

    counter = []
    # Set all to 0
    for _ in range(1000):
        counter.append(0)

    def consumer(index):
        # Each consumer fills a disjoint 200-slot slice of counter,
        # so writes never race on the same index.
        for i in range(200):
            data = queue.get(block=True)
            self.assertTrue('var' in data)
            counter[index * 200 + i] = data

    p = Thread(target=producer)
    p.start()
    consumers = []
    for index in range(5):
        t = Thread(target=consumer, args=(index, ))
        t.start()
        consumers.append(t)

    p.join()
    for t in consumers:
        t.join()

    # All 1000 items were consumed ...
    self.assertEqual(0, queue.qsize())
    # ... every slot was written (no consumer starved) ...
    for x in range(1000):
        self.assertNotEqual(0, counter[x],
                            "not 0 for counter's index %s" % x)
    # ... and no item was delivered twice.
    self.assertEqual(len(set(counter)), len(counter))
def test_unique_dictionary_serialization_json(self):
    """Equal dicts de-duplicate under the JSON serializer regardless of key order."""
    queue = UniqueQ(
        path=self.path,
        multithreading=True,
        auto_commit=self.auto_commit,
        serializer=serializers.json,
    )
    original = {"foo": 1, "bar": 2}
    queue.put(original)
    self.assertEqual(queue.total, 1)
    # The same mapping written with keys in a different order must be
    # recognized as a duplicate, so the total stays at 1.
    reordered = {"bar": 2, "foo": 1}
    queue.put(reordered)
    self.assertEqual(queue.total, 1)
def __init__(self):
    # FastDFS client built from the tracker configuration file.
    self.cli = client.Fdfs_client(client.get_tracker_conf(CONF_FILE))
    # Persistent de-duplicating work queue (duplicate puts are ignored).
    self.queue = UniqueQ(QUEUE_NAME)