def read_state(customer_id, broker_id):
    service_dir = settings.ServiceDir('service_message_broker')
    keepers_dir = os.path.join(service_dir, 'keepers')
    broker_dir = os.path.join(keepers_dir, broker_id)
    keeper_state_file_path = os.path.join(broker_dir, customer_id)
    json_value = None
    if os.path.isfile(keeper_state_file_path):
        try:
            json_value = jsn.loads_text(local_fs.ReadTextFile(keeper_state_file_path))
        except:
            lg.exc()
            return None
        if _Debug:
            lg.args(_DebugLevel, customer_id=customer_id, broker_id=broker_id, json_value=json_value)
        return json_value
    # fallback: the broker global ID may have rotated, try to match any
    # stored broker directory that resolves to the same cached IDURL
    broker_idurl = global_id.glob2idurl(broker_id)
    if id_url.is_cached(broker_idurl):
        for one_broker_id in os.listdir(keepers_dir):
            one_broker_idurl = global_id.glob2idurl(one_broker_id)
            if id_url.is_cached(one_broker_idurl):
                if one_broker_idurl == broker_idurl:
                    broker_dir = os.path.join(keepers_dir, one_broker_id)
                    keeper_state_file_path = os.path.join(broker_dir, customer_id)
                    json_value = None
                    if os.path.isfile(keeper_state_file_path):
                        try:
                            json_value = jsn.loads_text(local_fs.ReadTextFile(keeper_state_file_path))
                        except:
                            lg.exc()
                            return None
                        if _Debug:
                            lg.args(_DebugLevel, customer_id=customer_id, broker_id=one_broker_id, json_value=json_value)
                        return json_value
    return None
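# Usage sketch for read_state(), illustrative only: the keeper state is stored
# on disk under <service_dir>/keepers/<broker_id>/<customer_id>, and the
# fallback loop handles brokers whose global ID changed after an identity
# rotation but still resolves to the same cached IDURL. The IDs below are
# hypothetical.
def _example_read_keeper_state():
    state = read_state(customer_id='alice@id-host-a.example', broker_id='broker1@id-host-b.example')
    if state is None:
        lg.warn('no keeper state found')  # missing state file or unreadable JSON
    return state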
def _do_lookup_one_broker(self, broker_pos, skip_brokers):
    if _Debug:
        lg.args(_DebugLevel, broker_pos=broker_pos, skip_brokers=skip_brokers, connecting_brokers=self.connecting_brokers)
    # collect all brokers that must not be picked again: already known,
    # already connected, explicitly skipped, or marked "dead"
    exclude_brokers = set()
    for known_broker_id in groups.known_brokers(self.group_creator_id):
        if known_broker_id:
            exclude_brokers.add(global_id.glob2idurl(known_broker_id, as_field=False))
    for connected_broker_idurl in self.connected_brokers.values():
        exclude_brokers.add(id_url.to_bin(connected_broker_idurl))
    for skip_idurl in skip_brokers:
        if skip_idurl:
            exclude_brokers.add(id_url.to_bin(skip_idurl))
    if self.dead_broker_id:
        exclude_brokers.add(global_id.glob2idurl(self.dead_broker_id, as_field=False))
    result = p2p_service_seeker.connect_random_node(
        lookup_method=lookup.random_message_broker,
        service_name='service_message_broker',
        service_params=lambda idurl: self._do_prepare_service_request_params(idurl, broker_pos),
        exclude_nodes=list(exclude_brokers),
    )
    result.addCallback(self._on_broker_hired, broker_pos)
    if _Debug:
        result.addErrback(lg.errback, debug=_Debug, debug_level=_DebugLevel, method='group_member._do_lookup_one_broker')
    result.addErrback(self._on_message_broker_lookup_failed, broker_pos)
    return result
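# Sketch of how a caller might chain the lookup, assuming the surrounding
# group_member state machine: connect_random_node() returns a twisted
# Deferred, so success and failure both arrive asynchronously. The arguments
# below are hypothetical.
def _example_start_broker_lookup(member):
    d = member._do_lookup_one_broker(broker_pos=0, skip_brokers=[])
    d.addBoth(lambda res: lg.dbg(_DebugLevel, 'broker lookup completed: %r' % res))
    return d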
def cache_message(data, message_id, sender_id, recipient_id, message_type=None, direction=None):
    if _Debug:
        lg.args(_DebugLevel, message_id=message_id, sender_id=sender_id, recipient_id=recipient_id, message_type=message_type)
    if message_type == 'private_message':
        # both sender and recipient keys must be registered locally before the
        # message can be stored; try to register them from cached identities
        if not my_keys.is_key_registered(sender_id):
            sender_idurl = global_id.glob2idurl(sender_id)
            known_ident = identitycache.FromCache(sender_idurl)
            if not known_ident:
                lg.warn('sender identity %r was not cached, not possible to store message locally' % sender_idurl)
                return False
            if not my_keys.register_key(sender_id, known_ident.getPublicKey()):
                lg.err('failed to register known public key of the sender: %r' % sender_id)
                return False
        if not my_keys.is_key_registered(recipient_id):
            recipient_idurl = global_id.glob2idurl(recipient_id)
            known_ident = identitycache.FromCache(recipient_idurl)
            if not known_ident:
                lg.warn('recipient identity %r was not cached, not possible to store message locally' % recipient_idurl)
                return False
            if not my_keys.register_key(recipient_id, known_ident.getPublicKey()):
                lg.err('failed to register known public key of the recipient: %r' % recipient_id)
                return False
        return store_message(data, message_id, sender_id, recipient_id, message_type, direction)
    if message_type == 'group_message' or message_type == 'personal_message':
        if not my_keys.is_key_registered(recipient_id):
            lg.err('failed to cache %r because recipient key %r was not registered' % (message_type, recipient_id, ))
            return False
        return store_message(data, message_id, sender_id, recipient_id, message_type, direction)
    raise Exception('unexpected message type: %r' % message_type)
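# Illustrative call, assuming both identities were already fetched into
# identitycache so their public keys can be registered; all values below are
# hypothetical.
def _example_cache_private_message():
    return cache_message(
        data={'body': 'hello'},
        message_id='1234567890',
        sender_id='alice@id-host-a.example',
        recipient_id='bob@id-host-b.example',
        message_type='private_message',
        direction='incoming',
    )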
def doSendMyListFiles(self, *args, **kwargs):
    """
    Action method.
    """
    json_list_files = backup_fs.Serialize(
        customer_idurl=global_id.glob2idurl(self.key_id),
        to_json=True,
        filter_cb=lambda path_id, path, info: strng.to_text(info.key_id) == strng.to_text(self.key_id),
    )
    raw_list_files = serialization.DictToBytes(json_list_files, keys_to_text=True, values_to_text=True, encoding='utf-8')
    if _Debug:
        lg.out(_DebugLevel, 'shared_access_donor.doSendMyListFiles prepared list of files for %s :\n%s' % (self.remote_idurl, raw_list_files))
    block = encrypted.Block(
        CreatorID=my_id.getIDURL(),
        BackupID=self.key_id,
        Data=raw_list_files,
        SessionKey=key.NewSessionKey(session_key_type=key.SessionKeyType()),
        SessionKeyType=key.SessionKeyType(),
        EncryptKey=self.key_id,
    )
    encrypted_list_files = block.Serialize()
    packet_id = '%s:%s' % (self.key_id, packetid.UniqueID(), )
    p2p_service.SendFiles(
        idurl=self.remote_idurl,
        raw_list_files_info=encrypted_list_files,
        packet_id=packet_id,
        callbacks={
            commands.Ack(): lambda response, _: self.automat('list-files-ok', response),
            commands.Fail(): lambda response, _: self.automat('fail', Exception(str(response))),
            None: lambda pkt_out: self.automat('fail', Exception('timeout')),
        },
    )
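# Note on the callbacks mapping above: commands.Ack() drives the state machine
# to 'list-files-ok', commands.Fail() fires 'fail' with the response text, and
# the None key is the timeout handler. The packet_id pairs the share key with a
# unique suffix, e.g. 'share_abcd$alice@id-host.example:123456' (hypothetical
# values).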
def doInit(self, *args, **kwargs):
    """
    Action method.
    """
    self.queue_id = kwargs['queue_id']
    self.start_sequence_id = kwargs['start_sequence_id']
    self.end_sequence_id = kwargs['end_sequence_id']
    self.archive_folder_path = kwargs['archive_folder_path']
    qa, oid, _ = global_id.SplitGlobalQueueID(self.queue_id)
    self.queue_alias = qa
    self.queue_owner_id = oid
    self.queue_owner_idurl = global_id.glob2idurl(self.queue_owner_id)
    self.group_key_id = my_keys.make_key_id(alias=self.queue_alias, creator_glob_id=self.queue_owner_id)
    self.suppliers_list = []
    self.ecc_map = None
    self.correctable_errors = 0
    self.requested_list_files = {}
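# Note: SplitGlobalQueueID() yields three components; doInit() keeps the first
# two (queue alias and queue owner global ID) and ignores the third. The group
# key is then rebuilt as '<queue_alias>$<queue_owner_id>' via
# my_keys.make_key_id() (format inferred from get_creator_idurl() below, which
# splits a key_id on '$').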
def doInit(self, *args, **kwargs):
    """
    Action method.
    """
    self.queue_id = kwargs['queue_id']
    self.archive_info = kwargs['archive_info']
    self.archive_folder_path = kwargs['archive_folder_path']
    self.result_defer = kwargs.get('result_defer')
    qa, oid, _ = global_id.SplitGlobalQueueID(self.queue_id)
    self.queue_alias = qa
    self.queue_owner_id = oid
    self.queue_owner_idurl = global_id.glob2idurl(self.queue_owner_id)
    self.group_key_id = my_keys.make_key_id(alias=self.queue_alias, creator_glob_id=self.queue_owner_id)
    self.backup_job = None
    self.backup_max_block_num = None
    self.suppliers_list = []
    self.ecc_map = None
    self.correctable_errors = 0
    self.packets_out = {}
def get_creator_idurl(key_id, as_field=True):
    """
    Returns creator IDURL from the key_id.
    """
    _, _, creator_glob_id = key_id.partition('$')
    return global_id.glob2idurl(creator_glob_id, as_field=as_field)
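# Example with a hypothetical key: for 'share_abcd$alice@id-host.example' the
# part after '$' is the creator's global ID, so this resolves alice's IDURL:
def _example_creator_idurl():
    return get_creator_idurl('share_abcd$alice@id-host.example')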
def on_identity_url_changed(evt):
    from access import group_member
    service_dir = settings.ServiceDir('service_private_groups')
    groups_dir = os.path.join(service_dir, 'groups')
    brokers_dir = os.path.join(service_dir, 'brokers')
    old_idurl = id_url.field(evt.data['old_idurl'])
    new_idurl = id_url.field(evt.data['new_idurl'])
    active_group_keys = list(active_groups())
    to_be_reconnected = []
    for group_key_id in active_group_keys:
        if not group_key_id:
            continue
        group_creator_idurl = global_id.glob2idurl(group_key_id)
        if id_url.is_the_same(group_creator_idurl, old_idurl):
            old_group_path = os.path.join(groups_dir, group_key_id)
            latest_group_key_id = my_keys.latest_key_id(group_key_id)
            latest_group_path = os.path.join(groups_dir, latest_group_key_id)
            lg.info('going to rename rotated group file: %r -> %r' % (old_group_path, latest_group_path, ))
            if os.path.isfile(old_group_path):
                try:
                    os.rename(old_group_path, latest_group_path)
                except:
                    lg.exc()
                    continue
            else:
                lg.warn('key file %r was not found, key was not renamed' % old_group_path)
            active_groups()[latest_group_key_id] = active_groups().pop(group_key_id)
            group_member.rotate_active_group_memeber(group_key_id, latest_group_key_id)
        gm = group_member.get_active_group_member(group_key_id)
        if gm and gm.connected_brokers and id_url.is_in(old_idurl, gm.connected_brokers.values()):
            lg.info('connected broker %r IDURL is rotated, going to reconnect %r' % (old_idurl, gm, ))
            if group_key_id not in to_be_reconnected:
                to_be_reconnected.append(group_key_id)
    known_customers = list(known_brokers().keys())
    for customer_id in known_customers:
        latest_customer_id = global_id.idurl2glob(new_idurl)
        customer_idurl = global_id.glob2idurl(customer_id)
        if id_url.is_the_same(customer_idurl, old_idurl):
            latest_customer_dir = os.path.join(brokers_dir, latest_customer_id)
            lg.info('going to rename rotated customer id: %r -> %r' % (customer_id, latest_customer_id, ))
            old_customer_dir = os.path.join(brokers_dir, customer_id)
            if os.path.isdir(old_customer_dir):
                try:
                    bpio.move_dir_recursive(old_customer_dir, latest_customer_dir)
                    bpio.rmdir_recursive(old_customer_dir)
                except:
                    lg.exc()
                    continue
            known_brokers()[latest_customer_id] = known_brokers().pop(customer_id)
            for broker_pos, broker_id in enumerate(known_brokers(latest_customer_id)):
                if not broker_id:
                    continue
                broker_idurl = global_id.glob2idurl(broker_id)
                if broker_idurl == old_idurl:
                    latest_broker_id = global_id.idurl2glob(new_idurl)
                    latest_broker_path = os.path.join(latest_customer_dir, latest_broker_id)
                    lg.info('going to rename rotated broker id: %r -> %r' % (broker_id, latest_broker_id, ))
                    old_broker_path = os.path.join(latest_customer_dir, broker_id)
                    if os.path.isfile(old_broker_path):
                        try:
                            os.rename(old_broker_path, latest_broker_path)
                        except:
                            lg.exc()
                            continue
                    if latest_broker_id in known_brokers(latest_customer_id):
                        lg.warn('broker %r already exists' % latest_broker_id)
                        continue
                    known_brokers()[latest_customer_id][broker_pos] = latest_broker_id
    if _Debug:
        lg.args(_DebugLevel, to_be_reconnected=to_be_reconnected)
    for group_key_id in to_be_reconnected:
        gm = group_member.get_active_group_member(group_key_id)
        if gm:
            gm.automat('reconnect')
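# The handler above expects an event whose payload carries both IDURLs, e.g.
# (hypothetical values):
#     evt.data == {
#         'old_idurl': 'http://old-id-host.example/alice.xml',
#         'new_idurl': 'http://new-id-host.example/alice.xml',
#     }
# Everything keyed by the old identity (group key files, known customer and
# broker records) is renamed to the new global ID, and any group member
# connected through a rotated broker is asked to reconnect.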
def build_json_conversation(**record):
    conv = {
        'key_id': '',
        'label': '',
        'state': 'OFFLINE',
        'index': None,
        'id': None,
        'name': None,
        'repr': None,
        'events': None,
    }
    conv.update(record)
    if conv['type'] == 'private_message':
        local_key_id1, _, local_key_id2 = conv['conversation_id'].partition('&')
        try:
            local_key_id1 = int(local_key_id1)
            local_key_id2 = int(local_key_id2)
        except:
            lg.exc()
            return None
        usr1 = my_keys.get_local_key(local_key_id1)
        usr2 = my_keys.get_local_key(local_key_id2)
        if not usr1 or not usr2:
            # lg.warn('%r %r : not found sender or recipient key_id for %r' % (usr1, usr2, conv, ))
            return None
        usr1 = usr1.replace('master$', '')
        usr2 = usr2.replace('master$', '')
        idurl1 = global_id.glob2idurl(usr1, as_field=True)
        idurl2 = global_id.glob2idurl(usr2, as_field=True)
        conv_key_id = None
        conv_label = None
        user_idurl = None
        if (id_url.is_cached(idurl1) and idurl1 == my_id.getIDURL()) or usr1.split('@')[0] == my_id.getIDName():
            user_idurl = idurl2
            conv_key_id = global_id.UrlToGlobalID(idurl2, include_key=True)
            conv_label = conv_key_id.replace('master$', '').split('@')[0]
        if (id_url.is_cached(idurl2) and idurl2 == my_id.getIDURL()) or usr2.split('@')[0] == my_id.getIDName():
            user_idurl = idurl1
            conv_key_id = global_id.UrlToGlobalID(idurl1, include_key=True)
            conv_label = conv_key_id.replace('master$', '').split('@')[0]
        if conv_key_id:
            conv['key_id'] = conv_key_id
        if conv_label:
            conv['label'] = conv_label
        else:
            conv['label'] = conv_key_id
        if user_idurl:
            on_st = online_status.getInstance(user_idurl, autocreate=False)
            if on_st:
                conv.update(on_st.to_json())
    elif conv['type'] == 'group_message' or conv['type'] == 'personal_message':
        local_key_id, _, _ = conv['conversation_id'].partition('&')
        try:
            local_key_id = int(local_key_id)
        except:
            lg.exc()
            return None
        key_id = my_keys.get_local_key(local_key_id)
        if not key_id:
            # lg.warn('key_id was not found for %r' % conv)
            return None
        conv['key_id'] = key_id
        conv['label'] = my_keys.get_label(key_id) or key_id
        gm = group_member.get_active_group_member(key_id)
        if gm:
            conv.update(gm.to_json())
    return conv
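# Illustrative call with a hypothetical record: for a private conversation the
# conversation_id is '<local_key_id1>&<local_key_id2>'; the helper resolves
# both local keys, picks the remote side, and merges in its online status.
# Returns None unless both local key IDs actually exist.
def _example_build_private_conversation():
    return build_json_conversation(
        type='private_message',
        conversation_id='3&7',
    )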
def _do_connect_lookup_rotate_brokers(self, existing_brokers):
    if _Debug:
        lg.args(_DebugLevel, existing_brokers=existing_brokers)
    self.hired_brokers = {}
    self.connected_brokers = {}
    self.connecting_brokers = set()
    self.missing_brokers = set()
    self.rotated_brokers = []
    known_brokers = [None, ] * groups.REQUIRED_BROKERS_COUNT
    brokers_to_be_connected = []
    top_broker_pos = None
    for broker_pos in range(groups.REQUIRED_BROKERS_COUNT):
        broker_at_position = None
        for existing_broker in existing_brokers:
            if existing_broker['position'] == broker_pos and existing_broker['broker_idurl']:
                broker_at_position = existing_broker
                break
        if not broker_at_position:
            lg.warn('broker was not found for %r at position %d' % (self.group_key_id, broker_pos, ))
            self.missing_brokers.add(broker_pos)
            continue
        try:
            broker_idurl = broker_at_position['broker_idurl']
        except (IndexError, KeyError):
            broker_idurl = None
        if not broker_idurl:
            self.missing_brokers.add(broker_pos)
            lg.warn('broker is empty for %r at position %d' % (self.group_key_id, broker_pos, ))
            continue
        if self.dead_broker_id:
            if global_id.glob2idurl(self.dead_broker_id) == id_url.field(broker_idurl):
                self.missing_brokers.add(broker_pos)
                lg.warn('for %r broker %r is marked "dead" at position %d' % (self.group_key_id, self.dead_broker_id, broker_pos, ))
                continue
        known_brokers[broker_pos] = broker_idurl
        brokers_to_be_connected.append((broker_pos, broker_idurl, ))
        if _Debug:
            lg.dbg(_DebugLevel, 'found broker %r at position %r for %r' % (broker_idurl, broker_pos, self.group_key_id, ))
        if top_broker_pos is None:
            top_broker_pos = broker_pos
        if broker_pos < top_broker_pos:
            top_broker_pos = broker_pos
    if _Debug:
        lg.args(_DebugLevel, known_brokers=known_brokers, missing_brokers=self.missing_brokers)
    if top_broker_pos is None:
        lg.info('did not find any existing brokers, starting new lookups')
        self._do_lookup_connect_brokers(hiring_positions=list(range(groups.REQUIRED_BROKERS_COUNT)))
        return
    if top_broker_pos == 0:
        if self.missing_brokers:
            lg.warn('top broker is found, but there are missing brokers')
        else:
            lg.info('did not find any missing brokers')
        self._do_lookup_connect_brokers(
            hiring_positions=list(self.missing_brokers),
            available_brokers=brokers_to_be_connected,
            exclude_idurls=list(filter(None, known_brokers)),
        )
        return
    # top broker is not at position 0: shift every known broker up by
    # top_broker_pos positions and hire new brokers into the freed tail slots
    self.rotated_brokers = [None, ] * groups.REQUIRED_BROKERS_COUNT
    brokers_to_be_connected = []
    exclude_from_lookup = set()
    self.missing_brokers = set()
    for pos in range(groups.REQUIRED_BROKERS_COUNT):
        known_pos = pos + top_broker_pos
        if known_pos < groups.REQUIRED_BROKERS_COUNT:
            self.rotated_brokers[pos] = known_brokers[known_pos]
        if self.rotated_brokers[pos]:
            brokers_to_be_connected.append((pos, self.rotated_brokers[pos], ))
            exclude_from_lookup.add(self.rotated_brokers[pos])
        else:
            self.missing_brokers.add(pos)
    lg.info('brokers were rotated, starting new lookups and connecting to existing brokers')
    exclude_from_lookup.update(set(filter(None, known_brokers)))
    if self.dead_broker_id:
        exclude_from_lookup.add(global_id.glob2idurl(self.dead_broker_id, as_field=False))
    self._do_lookup_connect_brokers(
        hiring_positions=list(self.missing_brokers),
        available_brokers=brokers_to_be_connected,
        exclude_idurls=list(exclude_from_lookup),
    )
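# The rotation step above is a left shift of broker positions by
# top_broker_pos; tail slots become missing and are re-hired via lookup. A
# minimal sketch of the index math (pure illustration):
def _example_rotate_positions(known, top_pos):
    # e.g. known == [None, 'B1', 'B2'], top_pos == 1
    # -> rotated == ['B1', 'B2', None], missing == {2}
    rotated = known[top_pos:] + [None] * top_pos
    missing = {pos for pos, idurl in enumerate(rotated) if not idurl}
    return rotated, missing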