def _do_remember_brokers(self, event, *args, **kwargs):
    """
    Persist the currently connected brokers into the `groups` registry and
    select the broker at position 0 as the active one.

    Falls back to the broker hired for position 0 when no connected broker
    occupies that slot, raises when no active broker can be determined at
    all, and finally clears all temporary broker bookkeeping.
    """
    if _Debug:
        lg.args(_DebugLevel, event, *args, **kwargs)
    self.active_broker_id = None
    self.active_queue_id = None
    for pos in range(groups.REQUIRED_BROKERS_COUNT):
        idurl = self.connected_brokers.get(pos)
        if not idurl:
            # nothing connected at this slot - drop any stale record
            groups.clear_broker(self.group_creator_id, pos)
            continue
        glob_id = global_id.idurl2glob(idurl)
        groups.set_broker(self.group_creator_id, glob_id, pos)
        if pos == 0:
            # the top-position broker becomes the active one
            self.dead_broker_id = None
            self.active_broker_id = glob_id
            self.active_queue_id = global_id.MakeGlobalQueueID(
                queue_alias=self.group_queue_alias,
                owner_id=self.group_creator_id,
                supplier_id=self.active_broker_id,
            )
    if self.active_broker_id is None and 0 in self.hired_brokers:
        # fall back to the freshly hired broker for position 0
        # NOTE(review): hired_brokers[0] is used as-is here, while connected
        # brokers go through idurl2glob() above - confirm both store glob ids
        self.active_broker_id = self.hired_brokers[0]
        self.active_queue_id = global_id.MakeGlobalQueueID(
            queue_alias=self.group_queue_alias,
            owner_id=self.group_creator_id,
            supplier_id=self.active_broker_id,
        )
    if self.active_broker_id is None:
        raise Exception('no active broker is connected after event %r' % event)
    self.rotated_brokers = []
    self.hired_brokers.clear()
    self.missing_brokers.clear()
    self.connecting_brokers.clear()
def _do_prepare_service_request_params(self, possible_broker_idurl, desired_broker_position=-1, action='queue-connect'):
    """
    Build the JSON parameters dict for a service request towards a
    possible message broker.

    Includes the target queue id, the group key info (public part plus
    signature), the last known sequence id and the archive folder path.
    The `position` key is added only when a concrete broker position
    was requested.
    """
    target_queue_id = global_id.MakeGlobalQueueID(
        queue_alias=self.group_queue_alias,
        owner_id=self.group_creator_id,
        supplier_id=global_id.idurl2glob(possible_broker_idurl),
    )
    key_info = my_keys.get_key_info(self.group_key_id, include_private=False, include_signature=True)
    params = {
        'action': action,
        'queue_id': target_queue_id,
        'consumer_id': self.member_id,
        'producer_id': self.member_id,
        'group_key': key_info,
        'last_sequence_id': self.last_sequence_id,
        'archive_folder_path': groups.get_archive_folder_path(self.group_key_id),
    }
    if desired_broker_position >= 0:
        params['position'] = desired_broker_position
    if _Debug:
        lg.args(_DebugLevel, service_request_params=params)
    return params
def doCancelServiceQueue(self, *args, **kwargs):
    """
    Action method.

    Sends a CancelService packet for `service_p2p_notifications` to the
    supplier, asking it to unsubscribe this node from the
    'supplier-file-modified' queue, remove the local callback and stop
    the consumer entirely.
    """
    unsubscribe_item = {
        'scope': 'consumer',
        'action': 'unsubscribe',
        'consumer_id': strng.to_text(my_id.getGlobalID()),
        'queue_id': global_id.MakeGlobalQueueID(
            queue_alias='supplier-file-modified',
            owner_id=my_id.getGlobalID(),
            supplier_id=global_id.MakeGlobalID(idurl=self.supplier_idurl),
        ),
    }
    remove_callback_item = {
        'scope': 'consumer',
        'action': 'remove_callback',
        'consumer_id': strng.to_text(my_id.getGlobalID()),
        'method': strng.to_text(my_id.getLocalID()),
    }
    stop_item = {
        'scope': 'consumer',
        'action': 'stop',
        'consumer_id': strng.to_text(my_id.getGlobalID()),
    }
    p2p_service.SendCancelService(
        remote_idurl=self.supplier_idurl,
        service_name='service_p2p_notifications',
        json_payload={'items': [unsubscribe_item, remove_callback_item, stop_item]},
        callbacks={
            commands.Ack(): self._supplier_acked,
            commands.Fail(): self._supplier_failed,
        },
    )
def doRememberBroker(self, *args, **kwargs):
    """
    Action method.

    Stores the broker id received with the event and derives the matching
    global queue id for this group from it.
    """
    broker_id = args[0]
    self.active_broker_id = broker_id
    self.active_queue_id = global_id.MakeGlobalQueueID(
        queue_alias=self.group_glob_id['key_alias'],
        owner_id=self.group_creator_id,
        supplier_id=broker_id,
    )
def doRequestQueueService(self, *args, **kwargs):
    """
    Action method.

    Sends a RequestService packet for `service_p2p_notifications` to the
    supplier: starts a consumer, registers a callback and subscribes to the
    'supplier-file-modified' queue. When queue subscription is disabled the
    step is skipped via a 'queue-skip' event on the next reactor turn.
    """
    if not self.queue_subscribe:
        reactor.callLater(0, self.automat, 'queue-skip')  # @UndefinedVariable
        return
    start_item = {
        'scope': 'consumer',
        'action': 'start',
        'consumer_id': strng.to_text(my_id.getGlobalID()),
    }
    callback_item = {
        'scope': 'consumer',
        'action': 'add_callback',
        'consumer_id': strng.to_text(my_id.getGlobalID()),
        'method': strng.to_text(my_id.getLocalID()),
    }
    subscribe_item = {
        'scope': 'consumer',
        'action': 'subscribe',
        'consumer_id': strng.to_text(my_id.getGlobalID()),
        'queue_id': global_id.MakeGlobalQueueID(
            queue_alias='supplier-file-modified',
            owner_id=my_id.getGlobalID(),
            supplier_id=global_id.MakeGlobalID(idurl=self.supplier_idurl),
        ),
    }
    request = p2p_service.SendRequestService(
        remote_idurl=self.supplier_idurl,
        service_name='service_p2p_notifications',
        json_payload={'items': [start_item, callback_item, subscribe_item]},
        callbacks={
            commands.Ack(): self._supplier_queue_acked,
            commands.Fail(): self._supplier_queue_failed,
        },
    )
    # remember the outgoing packet id so the response can be matched later
    self.request_queue_packet_id = request.PacketID
def doRequestQueueService(self, arg):
    """
    Action method.

    Requests the `service_p2p_notifications` service on the supplier:
    starts a consumer, adds a local callback and subscribes to the
    'supplier-file-modified' queue. Fires a 'fail' event immediately when
    queue subscription is disabled.
    """
    if not self.queue_subscribe:
        self.automat('fail')
        return
    start_item = {
        'scope': 'consumer',
        'action': 'start',
        'consumer_id': my_id.getGlobalID(),
    }
    callback_item = {
        'scope': 'consumer',
        'action': 'add_callback',
        'consumer_id': my_id.getGlobalID(),
        'method': my_id.getLocalID(),
    }
    subscribe_item = {
        'scope': 'consumer',
        'action': 'subscribe',
        'consumer_id': my_id.getGlobalID(),
        'queue_id': global_id.MakeGlobalQueueID(
            queue_alias='supplier-file-modified',
            owner_id=my_id.getGlobalID(),
            supplier_id=global_id.MakeGlobalID(idurl=self.supplier_idurl),
        ),
    }
    p2p_service.SendRequestService(
        remote_idurl=self.supplier_idurl,
        service_name='service_p2p_notifications',
        json_payload={'items': [start_item, callback_item, subscribe_item]},
        callbacks={
            commands.Ack(): self._supplier_acked,
            commands.Fail(): self._supplier_failed,
        },
    )
def on_customer_accepted(evt):
    """
    Event handler: a new customer was accepted by this supplier.

    Makes sure the 'supplier-file-modified' queue for that customer exists
    (only when the customer key is registered), then registers, connects
    and starts this node as an event publisher on that queue.
    Returns False when the event carries no customer idurl, True otherwise.
    """
    customer_idurl = id_url.field(evt.data.get('idurl'))
    if not customer_idurl:
        lg.warn('unknown customer idurl in event data payload')
        return False
    customer_glob_id = global_id.idurl2glob(customer_idurl)
    queue_id = global_id.MakeGlobalQueueID(
        queue_alias='supplier-file-modified',
        owner_id=customer_glob_id,
        supplier_id=my_id.getGlobalID(),
    )
    if not p2p_queue.is_queue_exist(queue_id):
        customer_key_id = global_id.MakeGlobalID(customer=customer_glob_id, key_alias='customer')
        if my_keys.is_key_registered(customer_key_id):
            try:
                p2p_queue.open_queue(queue_id)
            except Exception as exc:
                lg.warn('failed to open queue %s : %s' % (queue_id, str(exc)))
        else:
            lg.warn('customer key %r for supplier queue not registered' % customer_key_id)
    # each step below is best-effort: bail out (still True) when the
    # previous step did not leave the expected state behind
    if not p2p_queue.is_queue_exist(queue_id):
        return True
    if not p2p_queue.is_producer_exist(my_id.getGlobalID()):
        try:
            p2p_queue.add_producer(my_id.getGlobalID())
        except Exception as exc:
            lg.warn('failed to add producer: %s' % str(exc))
    if not p2p_queue.is_producer_exist(my_id.getGlobalID()):
        return True
    if not p2p_queue.is_producer_connected(my_id.getGlobalID(), queue_id):
        try:
            p2p_queue.connect_producer(my_id.getGlobalID(), queue_id)
        except Exception as exc:
            lg.warn('failed to connect producer: %s' % str(exc))
    if not p2p_queue.is_producer_connected(my_id.getGlobalID(), queue_id):
        return True
    if not p2p_queue.is_event_publishing(my_id.getGlobalID(), 'supplier-file-modified'):
        try:
            p2p_queue.start_event_publisher(my_id.getGlobalID(), 'supplier-file-modified')
        except Exception as exc:
            lg.warn('failed to start event publisher: %s' % str(exc))
    return True
def _on_customer_accepted(self, e):
    """
    Event handler: a new customer was accepted by this supplier.

    Opens the 'supplier-file-modified' queue for that customer if needed,
    then registers, connects and starts this node as an event publisher
    on the queue. Every step is best-effort and only logged on failure.
    """
    from logs import lg
    from userid import my_id
    from userid import global_id
    from p2p import p2p_queue
    customer_idurl = e.data.get('idurl')
    if not customer_idurl:
        lg.warn('unknown customer idurl in event data payload')
        return
    customer_glob_id = global_id.idurl2glob(customer_idurl)
    queue_id = global_id.MakeGlobalQueueID(
        queue_alias='supplier-file-modified',
        owner_id=customer_glob_id,
        supplier_id=my_id.getGlobalID(),
    )
    if not p2p_queue.is_queue_exist(queue_id):
        try:
            p2p_queue.open_queue(queue_id)
        except Exception as exc:
            lg.warn('failed to open queue %s : %s' % (queue_id, str(exc)))
    # bail out early whenever the previous step did not succeed
    if not p2p_queue.is_queue_exist(queue_id):
        return
    if not p2p_queue.is_producer_exist(my_id.getGlobalID()):
        try:
            p2p_queue.add_producer(my_id.getGlobalID())
        except Exception as exc:
            lg.warn('failed to add producer: %s' % str(exc))
    if not p2p_queue.is_producer_exist(my_id.getGlobalID()):
        return
    if not p2p_queue.is_producer_connected(my_id.getGlobalID(), queue_id):
        try:
            p2p_queue.connect_producer(my_id.getGlobalID(), queue_id)
        except Exception as exc:
            lg.warn('failed to connect producer: %s' % str(exc))
    if not p2p_queue.is_producer_connected(my_id.getGlobalID(), queue_id):
        return
    if not p2p_queue.is_event_publishing(my_id.getGlobalID(), 'supplier-file-modified'):
        try:
            p2p_queue.start_event_publisher(my_id.getGlobalID(), 'supplier-file-modified')
        except Exception as exc:
            lg.warn('failed to start event publisher: %s' % str(exc))
def _on_customer_terminated(self, e):
    """
    Event handler: an existing customer relationship was terminated.

    Tears down the 'supplier-file-modified' queue machinery for that
    customer in reverse order: stop the event publisher, disconnect and
    remove the producer, and finally close the queue. Every step is
    best-effort and only logged on failure.
    """
    from logs import lg
    from userid import my_id
    from userid import global_id
    from p2p import p2p_queue
    customer_idurl = e.data.get('idurl')
    if not customer_idurl:
        lg.warn('unknown customer idurl in event data payload')
        return
    target_queue_id = global_id.MakeGlobalQueueID(
        queue_alias='supplier-file-modified',
        owner_id=global_id.idurl2glob(customer_idurl),
        supplier_id=my_id.getGlobalID(),
    )
    # TODO: need to decide when to stop producing
    # might be that other customers needs that info still
    if p2p_queue.is_event_publishing(my_id.getGlobalID(), 'supplier-file-modified'):
        try:
            p2p_queue.stop_event_publisher(my_id.getGlobalID(), 'supplier-file-modified')
        except Exception as exc:
            lg.warn('failed to stop event publisher: %s' % str(exc))
    if p2p_queue.is_producer_connected(my_id.getGlobalID(), target_queue_id):
        try:
            p2p_queue.disconnect_producer(my_id.getGlobalID(), target_queue_id)
        except Exception as exc:
            lg.warn('failed to disconnect producer: %s' % str(exc))
    if p2p_queue.is_producer_exist(my_id.getGlobalID()):
        try:
            p2p_queue.remove_producer(my_id.getGlobalID())
        except Exception as exc:
            lg.warn('failed to remove producer: %s' % str(exc))
    if p2p_queue.is_queue_exist(target_queue_id):
        try:
            p2p_queue.close_queue(target_queue_id)
        except Exception as exc:
            lg.warn('failed to stop queue %s : %s' % (target_queue_id, str(exc)))
def on_event_packet_received(newpacket, info, status, error_message):
    """
    Handles one incoming Event() packet.

    Three possible outcomes:
      * the payload carries queue_id + producer_id + message_id: it came
        from a queue, so it is consumed locally and Ack'ed;
      * no matching local queue exists for the derived queue id: the event
        is consumed locally and Ack'ed;
      * a local queue exists: the event is pushed into that queue on
        behalf of the producer (Fail is sent back when pushing raises).

    Returns False when the packet is not an Event() or its payload can
    not be parsed, True otherwise.
    """
    if newpacket.Command != commands.Event():
        return False
    try:
        e_json = json.loads(newpacket.Payload)
        event_id = e_json['event_id']
        payload = e_json['payload']
        queue_id = e_json.get('queue_id')
        producer_id = e_json.get('producer_id')
        message_id = e_json.get('message_id')
        created = e_json.get('created')
    except (ValueError, KeyError, TypeError):
        # narrowed from a bare "except:" to the errors json.loads() and the
        # key lookups can actually raise; also fixed the "invlid" typo
        lg.warn('invalid json payload')
        return False
    if queue_id and producer_id and message_id:
        # this message have an ID and producer so it came from a queue and needs to be consumed
        # also add more info comming from the queue
        if _Debug:
            lg.warn('received event from the queue at %s' % queue_id)
        payload.update(
            dict(
                queue_id=queue_id,
                producer_id=producer_id,
                message_id=message_id,
                created=created,
            ))
        events.send(event_id, data=payload)
        p2p_service.SendAck(newpacket)
        return True
    # this message does not have nor ID nor producer so it came from another user directly
    # lets' try to find a queue for that event and see if we need to publish it or not
    # NOTE(review): my_id.getGlobalID() is passed as "idurl" here - looks
    # suspicious, confirm against MakeGlobalID() expectations
    queue_id = global_id.MakeGlobalQueueID(
        queue_alias=event_id,
        owner_id=global_id.MakeGlobalID(idurl=newpacket.OwnerID),
        supplier_id=global_id.MakeGlobalID(idurl=my_id.getGlobalID()),
    )
    if queue_id not in queue():
        # such queue is not found locally, that means message is
        # probably addressed to that node and needs to be consumed directly
        if _Debug:
            lg.warn('received event was not delivered to any queue, consume now and send an Ack')
        # also add more info comming from the queue
        payload.update(
            dict(
                queue_id=queue_id,
                producer_id=producer_id,
                message_id=message_id,
                created=created,
            ))
        events.send(event_id, data=payload)
        p2p_service.SendAck(newpacket)
        return True
    # found a queue for that message, pushing there
    # TODO: add verification of producer's identity and signature
    if _Debug:
        lg.warn('pushing event to the queue %s on behalf of producer %s' % (queue_id, producer_id))
    try:
        push_message(
            producer_id=producer_id,
            queue_id=queue_id,
            data=payload,
            creation_time=created,
        )
    except Exception as exc:
        lg.exc()
        p2p_service.SendFail(newpacket, str(exc))
        return True
    p2p_service.SendAck(newpacket)
    return True
def do_handle_event_packet(newpacket, e_json):
    """
    Processes one Event() packet whose payload was already parsed into
    the `e_json` dict.

    The event is either consumed locally (when it arrived from a queue,
    or when no matching local queue exists) with an Ack sent back, or
    written into the matching local queue on behalf of the producer
    (a Fail is sent back when writing raises). Returns True in all
    handled cases.
    """
    event_id = strng.to_text(e_json['event_id'])
    payload = e_json['payload']
    queue_id = strng.to_text(e_json.get('queue_id'))
    producer_id = e_json.get('producer_id')
    message_id = strng.to_text(e_json.get('message_id'))
    created = strng.to_text(e_json.get('created'))
    if _Debug:
        lg.args(_DebugLevel, event_id=event_id, queue_id=queue_id, producer_id=producer_id, message_id=message_id)
    if queue_id and producer_id and message_id:
        # this message have an ID and producer so it came from a queue and needs to be consumed
        # also needs to be attached more info coming from the queue to the event body
        if _Debug:
            lg.info('received new event %s from the queue at %s' % (event_id, queue_id, ))
        payload.update(
            queue_id=queue_id,
            producer_id=producer_id,
            message_id=message_id,
            created=created,
        )
        events.send(event_id, data=payload)
        p2p_service.SendAck(newpacket)
        return True
    if producer_id == my_id.getID() and not queue_id:
        # this message addressed to me but not to any queue exclusively
        return True
    # this message does not have nor ID nor producer so it came from another user directly
    # lets' try to find a queue for that event and see if we need to publish it or not
    target_queue_id = global_id.MakeGlobalQueueID(
        queue_alias=event_id,
        owner_id=global_id.MakeGlobalID(idurl=newpacket.OwnerID),
        supplier_id=global_id.MakeGlobalID(idurl=my_id.getGlobalID()),
    )
    if target_queue_id not in queue():
        # such queue is not found locally, that means message is
        # probably addressed to that node and needs to be consumed directly
        if _Debug:
            lg.warn('received event %s was not delivered to any queue, consume now and send an Ack' % event_id)
        # also add more info comming from the queue
        payload.update(
            queue_id=target_queue_id,
            producer_id=producer_id,
            message_id=message_id,
            created=created,
        )
        events.send(event_id, data=payload)
        p2p_service.SendAck(newpacket)
        return True
    # found a queue for that message, pushing there
    # TODO: add verification of producer's identity and signature
    if _Debug:
        lg.info('pushing event %s to the queue %s on behalf of producer %s' % (event_id, target_queue_id, producer_id))
    try:
        write_message(
            producer_id=producer_id,
            queue_id=target_queue_id,
            data=payload,
            creation_time=created,
        )
    except Exception as exc:
        lg.exc()
        p2p_service.SendFail(newpacket, str(exc))
        return True
    p2p_service.SendAck(newpacket)
    return True