def test_double_commit_rollback_commit_twice(self):
    """Re-opening the set at an earlier commit number must roll back
    everything committed after that point, while duplicate commit()
    calls remain harmless no-ops.
    """
    # Nothing is present before any prepare/commit.
    self.assertNotIn(b"1", self.message_set)
    self.assertNotIn(b"2", self.message_set)
    self.message_set.prepare(b"1")
    self.message_set.prepare(b"2")
    # Remember the first commit number so we can recover to it later;
    # the immediate second commit() checks double-commit is harmless.
    cm_1 = self.message_set.commit()
    self.message_set.commit()
    self.assertIn(b"1", self.message_set)
    self.assertIn(b"2", self.message_set)
    self.message_set.prepare(b"3")
    self.message_set.commit()
    self.message_set.commit()
    self.assertIn(b"1", self.message_set)
    self.assertIn(b"2", self.message_set)
    self.assertIn(b"3", self.message_set)
    self.message_set.prepare(b"4")
    self.message_set.prepare(b"5")
    self.message_set.commit()
    self.message_set.commit()
    for msg in (b"1", b"2", b"3", b"4", b"5"):
        self.assertIn(msg, self.message_set)
    # Recover at the first commit number: only entries committed up to
    # cm_1 (b"1" and b"2") should survive.
    self.message_set = DiskMessageSet('/tmp/message_set',
                                      commit_number=cm_1,
                                      recover_state_on_init=True)
    self.assertIn(b"1", self.message_set)
    self.assertIn(b"2", self.message_set)
    self.assertNotIn(b"3", self.message_set)
    self.assertNotIn(b"4", self.message_set)
    self.assertNotIn(b"5", self.message_set)
 def test_big_add_w_recovery(self):
     random.seed(0)
     for i in range(10000):
         text = "%d" % i
         self.assertFalse(text.encode() in self.message_set)
         if random.random() < 0.05:
             self.message_set.commit()
             self.message_set = DiskMessageSet('/tmp/message_set',
                                               recover_state_on_init=True)
         if random.random() > 0.5:
             self.message_set.prepare(text.encode())
     random.seed(0)
     for i in range(10000):
         text = "%d" % i
         if random.random() < 0.1:
             self.message_set.commit()
             self.message_set = DiskMessageSet('/tmp/message_set',
                                               recover_state_on_init=True)
         if random.random() > 0.5:
             self.message_set.commit()
             self.assertTrue(text.encode() in self.message_set)
         else:
             self.message_set.commit()
             self.assertFalse(text.encode() in self.message_set)
def setUp(self) -> None:
     """Build the full integration fixture: a fresh on-disk message set,
     clean RabbitMQ queues, and a background consumer process that
     forwards received messages through a pipe.
     """
     # Make the consumer subprocess flush coverage data on SIGTERM;
     # silently skipped when pytest-cov is not installed.
     try:
         from pytest_cov.embed import cleanup_on_sigterm
     except ImportError:
         pass
     else:
         cleanup_on_sigterm()
     # Start from an empty directory so no state leaks between tests.
     shutil.rmtree('/tmp/message_set', ignore_errors=True)
     os.mkdir('/tmp/message_set')
     self.message_set = DiskMessageSet('/tmp/message_set', recover_state_on_init=True)
     # One-way pipe (duplex=False): the consumer process writes, the test reads.
     self.recv_pipe, self.write_pipe = Pipe(False)
     self.connection = pika.BlockingConnection(pika.ConnectionParameters(host="localhost"))
     self.channel = self.connection.channel()
     # Declare both queues (idempotent) and drain any leftover messages
     # so each test starts from empty queues.
     self.channel.queue_declare(queue=CONSUME_QUEUE)
     self.channel.queue_declare(queue=RESPONSE_QUEUE)
     self.channel.queue_purge(CONSUME_QUEUE)
     self.channel.queue_purge(RESPONSE_QUEUE)
     self.test_process = None
     # Background consumer feeding received messages into write_pipe;
     # started last, once all the resources it uses exist.
     self.consume_process = Process(target=self._read_process, args=(self.write_pipe,))
     self.consume_process.start()
def setUp(self) -> None:
     """Recreate an empty /tmp/message_set directory and open a fresh
     DiskMessageSet over it before each test."""
     # ignore_errors: the directory may not exist on the first run.
     shutil.rmtree('/tmp/message_set', ignore_errors=True)
     os.mkdir('/tmp/message_set')
     self.message_set = DiskMessageSet('/tmp/message_set',
                                       recover_state_on_init=True)
# --- Example #5 ---
def load_config(
        config_path: str,
        func_dict: Dict[str, Callable]) -> ConsumerProducerServiceConfig:
    """
    Loads the config for the server from a YAML file.

    :param config_path: the path where to load the config
    :param func_dict: mapping from names to callables; any operation
        argument whose string value matches a key here is replaced by
        the corresponding callable
    :return: the assembled ConsumerProducerServiceConfig
    """
    with open(config_path, "r") as yaml_file:
        config_dict = load(yaml_file, Loader=Loader)

    rabbit_params = config_dict['rabbit_params']
    host = rabbit_params['host']
    consume_from = rabbit_params['consume_from']
    produce_to = rabbit_params['produce_to']
    messages_to_group = rabbit_params['messages_to_group']

    # Optional idempotency set; kind defaults to DiskMessageSetByLastCommit
    # unless 'message_set_kind' explicitly selects DiskMessageSet.
    message_set = None
    if 'message_set_params' in config_dict:
        if config_dict.get('message_set_kind') == 'DiskMessageSet':
            message_set = DiskMessageSet(**config_dict['message_set_params'])
        else:
            message_set = DiskMessageSetByLastCommit(
                **config_dict['message_set_params'])

    # Optional sharding of outgoing messages.
    publisher_sharding = None
    if 'publisher_sharding' in config_dict:
        publisher_sharding = PublisherSharding(
            **config_dict['publisher_sharding'])

    # Build named aggregates first so GroupBy operations can reference
    # them by name below.
    group_aggregates = {}
    for group_aggregate in config_dict['group_aggregates']:
        group_aggregates[group_aggregate['name']] = GroupAggregate.factory(
            group_aggregate['type'], **group_aggregate['args'])

    operations = {}
    for operation in config_dict['operations']:
        # Resolve string arguments that name callables in func_dict.
        for k, v in operation['args'].items():
            if isinstance(v, str) and v in func_dict:
                operation['args'][k] = func_dict[v]
        # GroupBy receives aggregate objects, not their config names.
        if operation['type'] == 'GroupBy':
            operation['args']['aggregates'] = [
                group_aggregates[agg_name]
                for agg_name in operation['args']['aggregates']
            ]
        operations[operation['name']] = Operation.factory(
            operation['type'], **operation['args'])

    # Missing 'message_pipeline_kwargs' behaves exactly like an empty
    # kwargs dict, so one construction covers both former branches.
    message_pipeline = MessagePipeline(
        [operations[op_name] for op_name in config_dict['message_pipeline']],
        idempotency_set=message_set,
        **config_dict.get('message_pipeline_kwargs', {}))

    return ConsumerProducerServiceConfig(host=host,
                                         consume_from=consume_from,
                                         produce_to=produce_to,
                                         messages_to_group=messages_to_group,
                                         message_pipeline=message_pipeline,
                                         publisher_sharding=publisher_sharding)