def _do_lookup_one_broker(self, broker_pos, skip_brokers):
    """
    Start a random-node lookup for a message broker to be hired at `broker_pos`.
    Brokers already known, already connected, explicitly skipped or known to be
    dead are excluded from the lookup.
    """
    if _Debug:
        lg.args(_DebugLevel, broker_pos=broker_pos, skip_brokers=skip_brokers, connecting_brokers=self.connecting_brokers)
    exclude_brokers = set()
    # brokers already registered for this group creator
    for known_broker_id in groups.known_brokers(self.group_creator_id):
        if known_broker_id:
            exclude_brokers.add(global_id.glob2idurl(known_broker_id, as_field=False))
    # brokers we currently hold a connection to
    for connected_broker_idurl in self.connected_brokers.values():
        exclude_brokers.add(id_url.to_bin(connected_broker_idurl))
    # brokers the caller explicitly asked to avoid
    for skip_idurl in skip_brokers:
        if skip_idurl:
            exclude_brokers.add(id_url.to_bin(skip_idurl))
    # never try to hire a broker that was detected dead
    if self.dead_broker_id:
        exclude_brokers.add(global_id.glob2idurl(self.dead_broker_id, as_field=False))
    result = p2p_service_seeker.connect_random_node(
        lookup_method=lookup.random_message_broker,
        service_name='service_message_broker',
        service_params=lambda idurl: self._do_prepare_service_request_params(idurl, broker_pos),
        exclude_nodes=list(exclude_brokers),
    )
    result.addCallback(self._on_broker_hired, broker_pos)
    if _Debug:
        result.addErrback(lg.errback, debug=_Debug, debug_level=_DebugLevel, method='group_member._do_lookup_one_broker')
    result.addErrback(self._on_message_broker_lookup_failed, broker_pos)
    return result
def _do_verify(dht_value, position, broker_result):
    """
    Validate one message-broker DHT record. On any failure fires `broker_result`
    with a default (empty) record; on success proceeds into broker identity
    caching, which fires `broker_result` later.
    """
    if _Debug:
        lg.args(_DebugLevel, dht_value=dht_value, position=position, broker_result=broker_result)
    # default answer used whenever the record can not be verified
    ret = {
        'timestamp': None,
        'revision': 0,
        'customer_idurl': customer_idurl,
        'broker_idurl': None,
        'position': position,
        'archive_folder_path': None,
    }
    if not dht_value or not isinstance(dht_value, dict):
        broker_result.callback(ret)
        return ret
    try:
        if as_fields:
            _customer_idurl = id_url.field(dht_value['customer_idurl'])
            _broker_idurl = id_url.field(dht_value['broker_idurl'])
        else:
            _customer_idurl = id_url.to_bin(dht_value['customer_idurl'])
            _broker_idurl = id_url.to_bin(dht_value['broker_idurl'])
        _position = int(dht_value['position'])
        _archive_folder_path = strng.to_text(dht_value['archive_folder_path'])
        # missing 'revision'/'timestamp' raise here and fall into the except branch
        _revision = int(dht_value.get('revision'))
        _timestamp = int(dht_value.get('timestamp'))
    except:
        lg.exc()
        broker_result.callback(ret)
        return ret
    if as_fields and _customer_idurl != customer_idurl:
        lg.err('wrong customer idurl %r in message broker DHT record for %r at position %d' % (
            _customer_idurl, customer_idurl, position))
        broker_result.callback(ret)
        return ret
    if position != _position:
        lg.err('wrong position value %d in message broker DHT record for %r at position %d' % (
            _position, customer_idurl, position))
        broker_result.callback(ret)
        return ret
    ret.update({
        'customer_idurl': _customer_idurl,
        'broker_idurl': _broker_idurl,
        'position': _position,
        'archive_folder_path': _archive_folder_path,
        'revision': _revision,
        'timestamp': _timestamp,
    })
    _do_broker_identity_cache(ret, position, broker_result)
    return None
def _do_verify(dht_value, customer_idurl_bin):
    """
    Validate a suppliers DHT record read for `customer_idurl_bin`. If the record
    is empty, retries with the next known rotated idurl (if any); otherwise
    fires the enclosing `result` Deferred via `_do_identity_cache()`.
    """
    if customer_idurl_bin in rotated_idurls:
        rotated_idurls.remove(customer_idurl_bin)
    # default (empty) answer returned when verification fails
    ret = {
        'suppliers': [],
        'ecc_map': None,
        'customer_idurl': customer_idurl,
        'revision': 0,
        'publisher_idurl': None,
        'timestamp': None,
    }
    if not dht_value or not isinstance(dht_value, dict):
        if not rotated_idurls:
            result.callback(ret)
            return ret
        # record missing: try again with another (rotated) idurl of the same customer
        another_customer_idurl_bin = rotated_idurls.pop(0)
        lg.warn('found another rotated idurl %r and re-try reading customer suppliers' % another_customer_idurl_bin)
        d = dht_records.get_suppliers(another_customer_idurl_bin, return_details=True, use_cache=False)
        d.addCallback(_do_verify, another_customer_idurl_bin)
        d.addErrback(_on_error)
        return ret
    try:
        _ecc_map = strng.to_text(dht_value['ecc_map'])
        if as_fields:
            _customer_idurl = id_url.field(dht_value['customer_idurl'])
            _publisher_idurl = id_url.field(dht_value.get('publisher_idurl'))
            _suppliers_list = id_url.fields_list(dht_value['suppliers'])
        else:
            _customer_idurl = id_url.to_bin(dht_value['customer_idurl'])
            _publisher_idurl = id_url.to_bin(dht_value.get('publisher_idurl'))
            _suppliers_list = id_url.to_bin_list(dht_value['suppliers'])
        _revision = int(dht_value.get('revision'))
        _timestamp = int(dht_value.get('timestamp'))
    except:
        lg.exc()
        result.callback(ret)
        return ret
    ret.update({
        'suppliers': _suppliers_list,
        'ecc_map': _ecc_map,
        'customer_idurl': _customer_idurl,
        'revision': _revision,
        'publisher_idurl': _publisher_idurl,
        'timestamp': _timestamp,
    })
    return _do_identity_cache(ret)
def _do_cancel_outbox_packets(self, fail_info):
    """
    Cancel every pending outgoing packet that matches the failed routing info:
    same command, same remote, and created/owned by the failing source.
    """
    to_idurl = id_url.field(fail_info['to'])
    from_idurl = id_url.field(fail_info['from'])
    for p in packet_out.search_by_packet_id(fail_info['packet_id']):
        if p.outpacket.Command != fail_info['command']:
            continue
        if id_url.to_bin(to_idurl) != p.outpacket.RemoteID.to_bin():
            continue
        from_bin = id_url.to_bin(from_idurl)
        if p.outpacket.CreatorID.to_bin() == from_bin or p.outpacket.OwnerID.to_bin() == from_bin:
            if _Debug:
                lg.dbg(_DebugLevel, 'about to cancel %r because sending via proxy transport is failed' % p)
            p.automat('cancel')
def StopOverridingIdentity(idurl):
    """
    Drop the overridden identity XML previously stored for `idurl` and return
    the removed value (or None when nothing was overridden).
    """
    global _OverriddenIdentities
    idurl = id_url.field(idurl)
    if not idurl.is_latest():
        # the idurl was rotated: migrate the stored override to the latest key
        if idurl.original() in _OverriddenIdentities:
            if idurl.to_bin() not in _OverriddenIdentities:
                _OverriddenIdentities[idurl.to_bin()] = _OverriddenIdentities.pop(idurl.original())
                lg.info('detected and processed idurl rotate for overridden identity : %r -> %r' % (
                    idurl.original(), idurl.to_bin()))
    idurl = id_url.to_bin(idurl)
    result = _OverriddenIdentities.pop(idurl, None)
    if _Debug:
        lg.out(_DebugLevel, 'identitycache.StopOverridingIdentity removed overridden source for %s' % idurl)
        if result:
            lg.out(_DebugLevel, ' previous overridden identity was %d bytes' % len(result))
        lg.out(_DebugLevel, ' total number of overrides is %d' % len(_OverriddenIdentities))
    return result
def isStillNeeded(self, *args, **kwargs):
    """
    Condition method: decide whether supplier candidate `args[0]` is still
    required to reach the desired number of suppliers.
    """
    supplier_idurl = args[0]
    current_suppliers = contactsdb.suppliers()
    if supplier_idurl in current_suppliers:
        # already one of our suppliers, so we are still looking for more
        return True
    desired_number = settings.getSuppliersNumberDesired()
    needed_suppliers = current_suppliers[:desired_number]
    empty_suppliers = needed_suppliers.count(id_url.field(b''))
    # count distinct candidates: current needed suppliers plus this candidate,
    # minus anyone scheduled for dismissal
    candidates = set(id_url.to_bin_list(needed_suppliers))
    candidates.add(id_url.to_bin(supplier_idurl))
    candidates.difference_update(set(id_url.to_bin_list(self.dismiss_list)))
    return len(candidates) - empty_suppliers < desired_number
def _on_node_observed(self, idurl, node):
    """
    Handle a DHT node observation: skip identity processing for recently-seen
    idurls, otherwise schedule `process_method` for this node.
    """
    if self.stopped:
        lg.warn('DiscoveryTask[%r] : node observed, but discovery process already stopped' % self.id)
        return None
    idurl = id_url.to_bin(idurl)
    if _Debug:
        lg.out(_DebugLevel + 4, 'lookup.DiscoveryTask[%r]._on_node_observed %r : %r' % (self.id, node, idurl))
    cached_time = known_idurls().get(idurl)
    if cached_time and time.time() - cached_time < 30.0:
        # observed less than 30 seconds ago: reuse the cached identity
        if _Debug:
            lg.out(_DebugLevel + 4, 'lookup.DiscoveryTask[%r]._on_node_observed SKIP processing node %r because already observed recently' % (
                self.id, idurl))
        self._on_identity_cached(idurl, node)
        return idurl
    d = self.process_method(idurl, node)
    d.addCallback(self._on_identity_cached, node)
    # NOTE(review): handler name 'proces' differs from '_on_node_process_failed'
    # used elsewhere in this class — confirm which spelling is actually defined
    d.addErrback(self._on_node_proces_failed, node)
    return idurl
def _do_clean_sent_packet(self, info):
    """
    Remove from `self.sent_packets` every entry matching the delivered packet
    info (command, packet_id and remote idurl), plus any entry whose outgoing
    packet is missing.
    """
    to_idurl = id_url.to_bin(info['to'])
    to_remove = []
    for _key in self.sent_packets.keys():
        routed_packet, outpacket = self.sent_packets.get(_key, (None, None, ))
        if not outpacket:
            # broken entry: schedule it for removal as well
            if _Debug:
                lg.dbg(_DebugLevel, 'found empty outpacket : %r' % routed_packet)
            to_remove.append(_key)
            continue
        if outpacket.Command != info['command']:
            continue
        if outpacket.PacketID != info['packet_id']:
            continue
        if outpacket.RemoteID.to_bin() != to_idurl:
            continue
        to_remove.append(_key)
    # actual removal happens after the scan to avoid mutating during iteration
    for _key in to_remove:
        routed_packet, outpacket = self.sent_packets.pop(_key, (None, None, ))
def _on_identity_cached(self, idurl, node):
    """
    Record a successfully cached identity for a discovered node, unless the
    task stopped, the idurl is empty, or the idurl is in the ignore list.
    """
    if self.stopped:
        return None
    if not idurl:
        self._on_node_process_failed(None, node)
        return None
    if id_url.is_in(idurl, self.ignore_idurls):
        if _Debug:
            lg.dbg(_DebugLevel, 'lookup.DiscoveryTask[%r]._on_identity_cached IGNORE %r' % (self.id, idurl))
        self._on_node_process_failed(None, node)
        return None
    self.cached_count += 1
    idurl = id_url.to_bin(idurl)
    if idurl not in discovered_idurls(layer_id=self.layer_id):
        discovered_idurls(layer_id=self.layer_id).append(idurl)
    known_idurls()[idurl] = time.time()
    self._on_node_succeed(node, idurl)
    if _Debug:
        lg.out(_DebugLevel, 'lookup.DiscoveryTask[%r]._on_identity_cached : %s' % (self.id, idurl))
    return idurl
def on_message_delivered(idurl, json_data, recipient_global_id, packet_id, response, info, result_defer=None):
    """
    Record a successful message delivery time for `idurl` and fire the caller's
    Deferred (if any) with the response.
    """
    global _LastUserPingTime
    idurl = id_url.to_bin(idurl)
    if _Debug:
        lg.args(_DebugLevel, packet_id=packet_id, recipient_global_id=recipient_global_id)
    _LastUserPingTime[idurl] = time.time()
    if result_defer and not result_defer.called:
        result_defer.callback(response)
def on_ping_success(ok, idurl):
    """
    Remember the time of a successful ping with `idurl` and pass `ok` through.
    """
    global _LastUserPingTime
    idurl = id_url.to_bin(idurl)
    _LastUserPingTime[idurl] = time.time()
    lg.info('shake up hands %r before sending a message : %s' % (idurl, ok, ))
    return ok
def OverrideIdentity(idurl, xml_src):
    """
    Used by proxy router to remember the original identity of the routing node.

    Returns True when the override was stored, False when it was skipped
    because nothing changed.
    """
    global _OverriddenIdentities
    idurl = id_url.field(idurl)
    if not idurl.is_latest():
        # the idurl was rotated: migrate the stored override to the latest key
        if idurl.original() in _OverriddenIdentities:
            if idurl.to_bin() not in _OverriddenIdentities:
                _OverriddenIdentities[idurl.to_bin()] = _OverriddenIdentities.pop(idurl.original())
                lg.info('detected and processed idurl rotate for overridden identity : %r -> %r' % (
                    idurl.original(), idurl.to_bin()))
    idurl = id_url.to_bin(idurl)
    xml_src = strng.to_text(xml_src.strip())
    if idurl in _OverriddenIdentities:
        if _OverriddenIdentities[idurl] == xml_src:
            # identical override already stored
            if _Debug:
                lg.out(_DebugLevel, 'identitycache.OverrideIdentity SKIPPED "%s", no changes' % idurl)
            return False
        if _Debug:
            lg.out(_DebugLevel, 'identitycache.OverrideIdentity replacing overriden identity %r with new one' % idurl)
            lg.out(_DebugLevel, '\nOVERRIDDEN OLD:\n' + _OverriddenIdentities[idurl])
            lg.out(_DebugLevel, '\nOVERRIDDEN NEW:\n' + xml_src)
    else:
        orig = ''
        if identitydb.has_idurl(idurl):
            orig = identitydb.get_ident(idurl).serialize(as_text=True)
        if orig and orig == xml_src:
            # the "override" is identical to the original identity: nothing to do
            if _Debug:
                lg.out(_DebugLevel, 'identitycache.OverrideIdentity SKIPPED %r , overridden copy is the same as original' % idurl)
            return False
        if _Debug:
            lg.out(_DebugLevel, 'identitycache.OverrideIdentity replacing original identity for %r' % idurl)
            lg.out(_DebugLevel, '\nORIGINAL:\n' + orig)
            lg.out(_DebugLevel, '\nNEW:\n' + xml_src)
    _OverriddenIdentities[idurl] = xml_src
    if _Debug:
        lg.out(_DebugLevel, ' total number of overrides: %d' % len(_OverriddenIdentities))
    return True
def add_customer_meta_info(customer_idurl, info):
    """
    Store or update meta information for the given customer and persist the
    whole meta-info table to disk. Returns the updated table, or None when
    serialization failed.
    """
    global _CustomersMetaInfo
    customer_idurl = id_url.field(customer_idurl)
    if not customer_idurl.is_latest():
        # the idurl was rotated: migrate the stored meta info to the latest key
        if customer_idurl.original() in _CustomersMetaInfo:
            if customer_idurl.to_bin() not in _CustomersMetaInfo:
                _CustomersMetaInfo[customer_idurl.to_bin()] = _CustomersMetaInfo.pop(customer_idurl.original())
                lg.info('detected and processed idurl rotate for customer meta info : %r -> %r' % (
                    customer_idurl.original(), customer_idurl.to_bin()))
    customer_idurl = id_url.to_bin(customer_idurl)
    # normalize known fields before merging
    if 'family_snapshot' in info:
        info['family_snapshot'] = id_url.to_bin_list(info['family_snapshot'])
    if 'ecc_map' in info:
        info['ecc_map'] = strng.to_text(info['ecc_map'])
    if customer_idurl not in _CustomersMetaInfo:
        if _Debug:
            lg.out(_DebugLevel, 'contactsdb.add_customer_meta_info store new meta info for customer %r: %r' % (
                customer_idurl, info, ))
        _CustomersMetaInfo[customer_idurl] = {}
    else:
        if _Debug:
            lg.out(_DebugLevel, 'contactsdb.add_customer_meta_info update existing meta info for customer %r: %r' % (
                customer_idurl, info, ))
    _CustomersMetaInfo[customer_idurl].update(info)
    json_info = {k: jsn.dict_keys_to_text(v) for k, v in id_url.to_bin_dict(_CustomersMetaInfo).items()}
    try:
        raw_data = jsn.dumps(
            json_info,
            indent=2,
            sort_keys=True,
            keys_to_text=True,
            values_to_text=True,
        )
    except:
        lg.exc()
        return None
    local_fs.WriteTextFile(settings.CustomersMetaInfoFilename(), raw_data)
    return _CustomersMetaInfo
def on_message_failed(idurl, json_data, recipient_global_id, packet_id, response, info, result_defer=None, error=None):
    """
    Record a failed message delivery for `idurl` (resets the last ping time)
    and fire the caller's Deferred errback with the most informative error.
    """
    global _LastUserPingTime
    idurl = id_url.to_bin(idurl)
    lg.err('message %s failed sending to %s in %s because : %r' % (
        packet_id, recipient_global_id, response, error, ))
    if idurl in _LastUserPingTime:
        _LastUserPingTime[idurl] = 0
    if result_defer and not result_defer.called:
        # prefer the response; otherwise wrap a string error, or pass it as-is
        if response:
            err = Exception(response)
        elif strng.is_string(error):
            err = Exception(error)
        else:
            err = error
        result_defer.errback(err)
def _do_broker_identity_cache(dht_record, position, broker_result):
    """
    Refresh the cached identity of the broker from the verified DHT record and
    fire `broker_result` with the record once the identity is cached.
    """
    one_broker_task = identitycache.GetLatest(id_url.to_bin(dht_record['broker_idurl']))
    one_broker_task.addCallback(lambda xmlsrc: broker_result.callback(dht_record))
    # NOTE: '_on_borker_...' spelling matches the handler defined in this module
    one_broker_task.addErrback(_on_borker_identity_cache_failed, position, broker_result)
    return None
def _do_identity_cache(ret):
    """
    Make sure the identities of all suppliers and the customer from `ret` are
    cached, then continue into `_do_save_customer_suppliers()`.
    """
    all_stories = []
    # cache every supplier identity that is unknown or has no identity file yet
    for _supplier_idurl in ret['suppliers']:
        if not _supplier_idurl:
            continue
        _supplier_idurl = id_url.to_bin(_supplier_idurl)
        if not id_url.is_cached(_supplier_idurl) or not identitycache.HasFile(_supplier_idurl):
            one_supplier_story = identitycache.immediatelyCaching(_supplier_idurl)
            if _Debug:
                one_supplier_story.addErrback(
                    lg.errback,
                    debug=_Debug,
                    debug_level=_DebugLevel,
                    method='read_customer_suppliers._do_identity_cache'
                )
            all_stories.append(one_supplier_story)
    # same for the customer identity itself
    _customer_idurl = id_url.to_bin(ret['customer_idurl'])
    if _customer_idurl and (not id_url.is_cached(_customer_idurl) or not identitycache.HasFile(_customer_idurl)):
        one_customer_story = identitycache.immediatelyCaching(_customer_idurl)
        if _Debug:
            one_customer_story.addErrback(
                lg.errback,
                debug=_Debug,
                debug_level=_DebugLevel,
                method='read_customer_suppliers._do_identity_cache')
        all_stories.append(one_customer_story)
    if _Debug:
        lg.args(_DebugLevel, all_stories=len(all_stories), ret=ret)
    id_cache_story = DeferredList(all_stories, consumeErrors=True)
    id_cache_story.addCallback(_do_save_customer_suppliers, ret)
    if _Debug:
        id_cache_story.addErrback(
            lg.errback,
            debug=_Debug,
            debug_level=_DebugLevel,
            method='read_customer_suppliers._do_identity_cache')
    id_cache_story.addErrback(result.errback)
    return id_cache_story
def existing(customer_idurl):
    """
    Check whether a `queue_keeper()` instance exists for the given customer.

    Returns True/False, or None when the idurl is empty or its identity is not
    cached yet. (The previous docstring wrongly claimed an instance is
    returned — the function has always answered membership only.)
    """
    global _QueueKeepers
    customer_idurl = id_url.to_bin(customer_idurl)
    if id_url.is_empty(customer_idurl):
        return None
    if not id_url.is_cached(customer_idurl):
        lg.warn('customer idurl is not cached yet, can not start QueueKeeper()')
        return None
    customer_idurl = id_url.field(customer_idurl)
    return customer_idurl in _QueueKeepers
def _do_verify(dht_value):
    """
    Validate a suppliers DHT record; fire the enclosing `result` Deferred with
    an empty answer on failure, otherwise continue into `_do_identity_cache()`.
    """
    # default (empty) answer returned when verification fails
    ret = {
        'suppliers': [],
        'ecc_map': None,
        'customer_idurl': customer_idurl,
        'revision': 0,
        'publisher_idurl': None,
        'timestamp': None,
    }
    if not dht_value or not isinstance(dht_value, dict):
        result.callback(ret)
        return ret
    try:
        _ecc_map = strng.to_text(dht_value['ecc_map'])
        if as_fields:
            _customer_idurl = id_url.field(dht_value['customer_idurl'])
            _publisher_idurl = id_url.field(dht_value.get('publisher_idurl'))
            _suppliers_list = id_url.fields_list(dht_value['suppliers'])
        else:
            _customer_idurl = id_url.to_bin(dht_value['customer_idurl'])
            _publisher_idurl = id_url.to_bin(dht_value.get('publisher_idurl'))
            _suppliers_list = id_url.to_bin_list(dht_value['suppliers'])
        _revision = int(dht_value.get('revision'))
        _timestamp = int(dht_value.get('timestamp'))
    except:
        lg.exc()
        result.callback(ret)
        return ret
    ret.update({
        'suppliers': _suppliers_list,
        'ecc_map': _ecc_map,
        'customer_idurl': _customer_idurl,
        'revision': _revision,
        'publisher_idurl': _publisher_idurl,
        'timestamp': _timestamp,
    })
    return _do_identity_cache(ret)
def get_customer_meta_info(customer_idurl):
    """
    Return the stored meta info dictionary for the given customer (text keys
    and values), or an empty dict when nothing is known.
    """
    global _CustomersMetaInfo
    customer_idurl = id_url.field(customer_idurl)
    if not customer_idurl.is_latest():
        # the idurl was rotated: migrate the stored meta info to the latest key
        if customer_idurl.original() in _CustomersMetaInfo:
            if customer_idurl.to_bin() not in _CustomersMetaInfo:
                _CustomersMetaInfo[customer_idurl.to_bin()] = _CustomersMetaInfo.pop(customer_idurl.original())
                lg.info('detected and processed idurl rotate for customer meta info : %r -> %r' % (
                    customer_idurl.original(), customer_idurl.to_bin()))
    customer_idurl = id_url.to_bin(customer_idurl)
    meta_info = _CustomersMetaInfo.get(customer_idurl, {})
    return jsn.dict_keys_to_text(jsn.dict_values_to_text(meta_info))
def ReadOverriddenIdentityXMLSource(idurl):
    """
    Return the overridden identity XML stored for `idurl`, or None when the
    identity was not overridden.
    """
    global _OverriddenIdentities
    idurl = id_url.field(idurl)
    if not idurl.is_latest():
        # the idurl was rotated: migrate the stored override to the latest key
        if idurl.original() in _OverriddenIdentities:
            if idurl.to_bin() not in _OverriddenIdentities:
                _OverriddenIdentities[idurl.to_bin()] = _OverriddenIdentities.pop(idurl.original())
                lg.info('detected and processed idurl rotate for overridden identity : %r -> %r' % (
                    idurl.original(), idurl.to_bin()))
    idurl = id_url.to_bin(idurl)
    return _OverriddenIdentities.get(idurl, None)
def _on_request_service_ack(self, response, info):
    """
    Handle the Ack for our proxy-router service request.

    Validates the packet id and sender, rejects 'rejected' answers, locates the
    active transport session with the router, and fires either
    'service-accepted' or 'service-refused'.

    Fix: the original code re-checked `response.PacketID in
    self.request_service_packet_id` right after the guard that already
    returned when it was absent — that second check was always true and its
    `else: lg.warn(...)` branch was unreachable dead code; both are removed.
    """
    self.router_connection_info = None
    if response.PacketID not in self.request_service_packet_id:
        lg.warn('wrong PacketID in response: %s, but outgoing was : %s' % (
            response.PacketID, str(self.request_service_packet_id)))
        self.automat('service-refused', (response, info))
        return
    # guaranteed present after the guard above
    self.request_service_packet_id.remove(response.PacketID)
    if _Debug:
        lg.out(_DebugLevel, 'proxy_receiver._on_request_service_ack : %s' % str(response.Payload))
    if self.router_idurl != response.CreatorID:
        lg.err('received unexpected response from another node: %r ~ %r' % (self.router_idurl, response.CreatorID, ))
        self.automat('service-refused', (response, info))
        return
    service_ack_info = strng.to_text(response.Payload)
    if service_ack_info.startswith('rejected'):
        self.automat('service-refused', (response, info))
        return
    # find the live transport session with the router, by host first, then by idurl
    active_router_sessions = gateway.find_active_session(info.proto, host=info.host)
    if not active_router_sessions:
        active_router_sessions = gateway.find_active_session(info.proto, idurl=id_url.to_bin(response.CreatorID))
    if not active_router_sessions:
        lg.err('active connection with proxy router at %s:%s was not found' % (info.proto, info.host, ))
        if _Debug:
            lg.args(_DebugLevel, router_idurl=self.router_idurl, ack_packet=info, active_sessions=gateway.list_active_sessions(info.proto))
        self.automat('service-refused', (response, info))
        return
    self.router_connection_info = {
        'id': active_router_sessions[0].id,
        'index': active_router_sessions[0].index,
        'repr': repr(active_router_sessions[0]),
        'proto': info.proto,
        'host': info.host,
        'idurl': self.router_idurl,
        'global_id': global_id.UrlToGlobalID(self.router_idurl),
    }
    active_router_session_machine = automat.by_index(self.router_connection_info['index'])
    if active_router_session_machine is None:
        lg.err('did not found proxy router session state machine instance: %s' % self.router_connection_info)
        self.router_connection_info = None
        if _Debug:
            lg.args(_DebugLevel, automats=automat.objects())
        self.automat('service-refused', (response, info))
        return
    lg.info('found active session for proxy router: %s' % active_router_session_machine)
    self.automat('service-accepted', (response, info, active_router_session_machine))
def _on_cache_retry_success(self, xmlsrc, fail_info):
    """
    After the target identity was re-cached, find the failed outgoing packet in
    `self.sent_packets` and retry sending it through the router.
    """
    if _Debug:
        lg.args(_DebugLevel, sent_packets=len(self.sent_packets), fail_info=fail_info)
    to_idurl = id_url.to_bin(fail_info['to'])
    for _key in self.sent_packets.keys():
        routed_packet, outpacket = self.sent_packets.get(_key, (None, None, ))
        if not outpacket:
            if _Debug:
                lg.dbg(_DebugLevel, 'found empty outpacket : %r' % routed_packet)
            continue
        # match the failed packet by command, packet id and destination
        if outpacket.Command != fail_info['command']:
            continue
        if outpacket.PacketID != fail_info['packet_id']:
            continue
        if outpacket.RemoteID.to_bin() != to_idurl:
            continue
        routed_retry_packet = self._do_send_packet_to_router(
            outpacket=outpacket,
            callbacks=routed_packet.callbacks,
            wide=fail_info.get('wide', False),
            keep_alive=fail_info.get('keep_alive', False),
            response_timeout=fail_info.get('response_timeout', None),
            is_retry=True,
        )
        if not routed_retry_packet:
            self.automat('retry-send-failed', fail_info)
        else:
            # replace the stored routed packet with the retried one
            self.sent_packets[_key] = (routed_retry_packet, outpacket, )
            self.automat('retry', fail_info)
        del routed_packet
    return None
def consume_discovered_idurls(count=1, layer_id=0):
    """
    Pop up to `count` idurls (FIFO order) from the list of discovered nodes in
    the given DHT layer and return them as binary strings.
    """
    if not discovered_idurls(layer_id=layer_id):
        if _Debug:
            lg.out(_DebugLevel, 'lookup.consume_discovered_idurls returns empty list')
        return []
    results = []
    while len(results) < count and discovered_idurls(layer_id=layer_id):
        results.append(id_url.to_bin(discovered_idurls(layer_id=layer_id).pop(0)))
    if _Debug:
        lg.out(_DebugLevel, 'lookup.consume_discovered_idurls : %s' % results)
    return results
def on_idurl_response(response, result):
    """
    Extract the observed idurl from an observe-response and fire `result` with
    its binary form, or errback when the response carries no usable idurl.
    """
    if _Debug:
        lg.out(_DebugLevel, 'lookup.on_idurl_response : %r' % response)
    responded_idurl = response.get('idurl')
    if not responded_idurl:
        result.errback(Exception('idurl observe failed'))
        return response
    try:
        idurl = id_url.to_bin(responded_idurl)
    except:
        lg.exc()
        result.errback(Exception('idurl observe failed'))
        return response
    result.callback(idurl)
    return response
def doStopListening(self, *args, **kwargs):
    """
    Action method.

    Tear down the connection with the current proxy router: unsubscribe from
    its online-status events, detach from the active transport session state
    machine, wipe the stored router configuration and rebuild local identity.
    """
    if _Debug:
        lg.args(_DebugLevel, router_idurl=self.router_idurl)
    # unsubscribe from router online/offline notifications
    if online_status.isKnown(self.router_idurl):
        online_status.remove_online_status_listener_callback(
            idurl=self.router_idurl,
            callback_method=self._on_router_contact_status_connected,
        )
        online_status.remove_online_status_listener_callback(
            idurl=self.router_idurl,
            callback_method=self._on_router_contact_status_offline,
        )
    active_router_session_machine_index = None
    if self.router_connection_info:
        active_router_session_machine = None
        active_router_session_machine_index = self.router_connection_info.get('index', None)
        if active_router_session_machine_index is not None:
            active_router_session_machine = automat.by_index(active_router_session_machine_index)
        if not active_router_session_machine:
            # state machine not found by index: try to locate the live session by host, then by idurl
            active_router_sessions = gateway.find_active_session(
                proto=self.router_connection_info.get('proto'),
                host=self.router_connection_info.get('host'),
            )
            if not active_router_sessions:
                active_router_sessions = gateway.find_active_session(
                    proto=self.router_connection_info.get('proto'),
                    idurl=id_url.to_bin(self.router_idurl),
                )
            if active_router_sessions:
                active_router_session_machine = automat.by_index(active_router_sessions[0].index)
        if active_router_session_machine is not None:
            active_router_session_machine.removeStateChangedCallback(self._on_router_session_disconnected)
            lg.info('removed callback from router active session: %r' % active_router_session_machine)
        else:
            lg.err('did not found active router session state machine with index %s' % active_router_session_machine_index)
    # forget the router in local configuration and reset all related state
    WriteMyOriginalIdentitySource('')
    config.conf().setString('services/proxy-transport/current-router', '')
    callback.remove_inbox_callback(self._on_inbox_packet_received)
    self.router_identity = None
    self.router_idurl = None
    self.router_id = ''
    self.router_proto_host = None
    self.request_service_packet_id = []
    self.router_connection_info = None
    my_id.rebuildLocalIdentity()
def _on_queue_item_status_changed(self, pkt_out, status, error=''):
    """
    React to an outgoing-queue status change: when a direct connection to the
    current proxy router failed, trigger reconnection to another router.
    Returns True only when the 'router-disconnected' event was fired.
    """
    from transport.proxy import proxy_receiver
    if status == 'finished':
        return False
    if error != 'connection failed':
        return False
    if not pkt_out.remote_idurl or not pkt_out.outpacket:
        return False
    # ignore packets that were addressed directly to their final destination
    if id_url.to_bin(pkt_out.remote_idurl) == pkt_out.outpacket.RemoteID.to_bin():
        return False
    if not proxy_receiver.GetRouterIDURL():
        return False
    if pkt_out.remote_idurl != proxy_receiver.GetRouterIDURL():
        return False
    lg.err('connection failed with current proxy router, must reconnect to another router: %r %r %r' % (pkt_out, status, error, ))
    self.automat('router-disconnected')
    return True
def check_create(customer_idurl, auto_create=True, event='init'):
    """
    Creates new instance of `queue_keeper()` state machine and send "init"
    event to it.
    """
    customer_idurl = id_url.to_bin(customer_idurl)
    if _Debug:
        lg.args(_DebugLevel, customer_idurl=customer_idurl)
    if id_url.is_empty(customer_idurl):
        return None
    if not id_url.is_cached(customer_idurl):
        lg.warn('customer idurl is not cached yet, can not start QueueKeeper()')
        return None
    customer_idurl = id_url.field(customer_idurl)
    if customer_idurl not in list(queue_keepers().keys()):
        if not auto_create:
            return None
        if event:
            A(customer_idurl, event)
        if _Debug:
            lg.out(_DebugLevel, 'queue_keeper.check_create instance for customer %r was not found, made a new instance' % customer_idurl)
    return A(customer_idurl)
def remove_customer_meta_info(customer_idurl):
    """
    Erase the stored meta info for the given customer and persist the updated
    table to disk. Returns True on success, False when nothing was stored.
    """
    global _CustomersMetaInfo
    customer_idurl = id_url.field(customer_idurl)
    if not customer_idurl.is_latest():
        # the idurl was rotated: migrate the stored meta info to the latest key
        if customer_idurl.original() in _CustomersMetaInfo:
            if customer_idurl.to_bin() not in _CustomersMetaInfo:
                _CustomersMetaInfo[customer_idurl.to_bin()] = _CustomersMetaInfo.pop(customer_idurl.original())
                lg.info('detected and processed idurl rotate for customer meta info : %r -> %r' % (
                    customer_idurl.original(), customer_idurl.to_bin()))
    customer_idurl = id_url.to_bin(customer_idurl)
    if customer_idurl not in _CustomersMetaInfo:
        lg.warn('meta info for customer %r not exist' % customer_idurl)
        return False
    if _Debug:
        lg.out(_DebugLevel, 'contactsdb.remove_customer_meta_info erase existing meta info for customer %r' % customer_idurl)
    _CustomersMetaInfo.pop(customer_idurl)
    json_info = {k: jsn.dict_keys_to_text(v) for k, v in id_url.to_bin_dict(_CustomersMetaInfo).items()}
    local_fs.WriteTextFile(
        settings.CustomersMetaInfoFilename(),
        jsn.dumps(
            json_info,
            indent=2,
            sort_keys=True,
            keys_to_text=True,
            values_to_text=True,
        ))
    return True
def read_customer_suppliers(customer_idurl, as_fields=True, use_cache=True):
    """
    Read the suppliers DHT record of the given customer.

    Returns a Deferred firing a dict with keys 'suppliers', 'ecc_map',
    'customer_idurl', 'revision', 'publisher_idurl' and 'timestamp'. When the
    record is missing, known rotated idurls of the customer are tried as well.
    All involved identities are cached and the result is saved into contactsdb
    before the Deferred fires.
    """
    if as_fields:
        customer_idurl = id_url.field(customer_idurl)
    else:
        customer_idurl = id_url.to_bin(customer_idurl)
    rotated_idurls = id_url.list_known_idurls(customer_idurl, num_revisions=3)
    if _Debug:
        lg.args(_DebugLevel, customer_idurl=customer_idurl, rotated_idurls=rotated_idurls, as_fields=as_fields, use_cache=use_cache)
    result = Deferred()

    def _do_identity_cache(ret):
        # make sure every supplier identity and the customer identity are cached
        all_stories = []
        for _supplier_idurl in ret['suppliers']:
            if not _supplier_idurl:
                continue
            _supplier_idurl = id_url.to_bin(_supplier_idurl)
            if not id_url.is_cached(_supplier_idurl) or not identitycache.HasFile(_supplier_idurl):
                one_supplier_story = identitycache.immediatelyCaching(_supplier_idurl)
                if _Debug:
                    one_supplier_story.addErrback(
                        lg.errback,
                        debug=_Debug,
                        debug_level=_DebugLevel,
                        method='read_customer_suppliers._do_identity_cache'
                    )
                all_stories.append(one_supplier_story)
        _customer_idurl = id_url.to_bin(ret['customer_idurl'])
        if _customer_idurl and (not id_url.is_cached(_customer_idurl) or not identitycache.HasFile(_customer_idurl)):
            one_customer_story = identitycache.immediatelyCaching(_customer_idurl)
            if _Debug:
                one_customer_story.addErrback(
                    lg.errback,
                    debug=_Debug,
                    debug_level=_DebugLevel,
                    method='read_customer_suppliers._do_identity_cache')
            all_stories.append(one_customer_story)
        if _Debug:
            lg.args(_DebugLevel, all_stories=len(all_stories), ret=ret)
        id_cache_story = DeferredList(all_stories, consumeErrors=True)
        id_cache_story.addCallback(_do_save_customer_suppliers, ret)
        if _Debug:
            id_cache_story.addErrback(
                lg.errback,
                debug=_Debug,
                debug_level=_DebugLevel,
                method='read_customer_suppliers._do_identity_cache')
        id_cache_story.addErrback(result.errback)
        return id_cache_story

    def _do_verify(dht_value, customer_idurl_bin):
        # validate the DHT record; retry with a rotated idurl when empty
        if customer_idurl_bin in rotated_idurls:
            rotated_idurls.remove(customer_idurl_bin)
        ret = {
            'suppliers': [],
            'ecc_map': None,
            'customer_idurl': customer_idurl,
            'revision': 0,
            'publisher_idurl': None,
            'timestamp': None,
        }
        if not dht_value or not isinstance(dht_value, dict):
            if not rotated_idurls:
                result.callback(ret)
                return ret
            another_customer_idurl_bin = rotated_idurls.pop(0)
            lg.warn('found another rotated idurl %r and re-try reading customer suppliers' % another_customer_idurl_bin)
            d = dht_records.get_suppliers(another_customer_idurl_bin, return_details=True, use_cache=False)
            d.addCallback(_do_verify, another_customer_idurl_bin)
            d.addErrback(_on_error)
            return ret
        try:
            _ecc_map = strng.to_text(dht_value['ecc_map'])
            if as_fields:
                _customer_idurl = id_url.field(dht_value['customer_idurl'])
                _publisher_idurl = id_url.field(dht_value.get('publisher_idurl'))
                _suppliers_list = id_url.fields_list(dht_value['suppliers'])
            else:
                _customer_idurl = id_url.to_bin(dht_value['customer_idurl'])
                _publisher_idurl = id_url.to_bin(dht_value.get('publisher_idurl'))
                _suppliers_list = id_url.to_bin_list(dht_value['suppliers'])
            _revision = int(dht_value.get('revision'))
            _timestamp = int(dht_value.get('timestamp'))
        except:
            lg.exc()
            result.callback(ret)
            return ret
        ret.update({
            'suppliers': _suppliers_list,
            'ecc_map': _ecc_map,
            'customer_idurl': _customer_idurl,
            'revision': _revision,
            'publisher_idurl': _publisher_idurl,
            'timestamp': _timestamp,
        })
        return _do_identity_cache(ret)

    def _do_save_customer_suppliers(id_cached_result, ret):
        # persist the suppliers list locally, except for our own record
        if my_id.getIDURL() != id_url.field(ret['customer_idurl']):
            contactsdb.set_suppliers(ret['suppliers'], customer_idurl=ret['customer_idurl'])
            contactsdb.save_suppliers(customer_idurl=ret['customer_idurl'])
            if ret.get('ecc_map'):
                for supplier_idurl in ret['suppliers']:
                    if supplier_idurl and id_url.is_cached(supplier_idurl):
                        contactsdb.add_supplier_meta_info(
                            supplier_idurl=supplier_idurl,
                            info={'ecc_map': ret['ecc_map'], },
                            customer_idurl=ret['customer_idurl'],
                        )
        else:
            if _Debug:
                lg.out(_DebugLevel, 'dht_relations._do_save_customer_suppliers SKIP processing my own suppliers')
        if _Debug:
            lg.out(_DebugLevel, 'dht_relations._do_save_customer_suppliers OK for %r returned %d suppliers' % (
                ret['customer_idurl'], len(ret['suppliers']), ))
        result.callback(ret)
        return ret

    def _on_error(err):
        try:
            msg = err.getErrorMessage()
        except:
            msg = str(err).replace('Exception:', '')
        if _Debug:
            lg.out(_DebugLevel, 'dht_relations.read_customer_suppliers ERROR %r failed with %r' % (
                customer_idurl, msg, ))
        result.errback(err)
        return None

    customer_idurl_bin = id_url.to_bin(customer_idurl)
    d = dht_records.get_suppliers(customer_idurl_bin, return_details=True, use_cache=use_cache)
    d.addCallback(_do_verify, customer_idurl_bin)
    d.addErrback(_on_error)
    return result
def read_customer_message_brokers(customer_idurl, positions=[0, ], return_details=True, as_fields=True, use_cache=True):
    """
    Read the message-broker DHT records of the given customer at the requested
    `positions`.

    Returns a Deferred firing a list of verified broker records (dicts with
    'customer_idurl', 'broker_idurl', 'position', 'revision' and 'timestamp'),
    sorted by position; positions with no valid record are omitted.
    """
    if _Debug:
        lg.args(_DebugLevel, customer_idurl=customer_idurl, use_cache=use_cache, positions=positions)
    if as_fields:
        customer_idurl = id_url.field(customer_idurl)
    else:
        customer_idurl = id_url.to_bin(customer_idurl)
    result = Deferred()

    def _on_borker_identity_cache_failed(err, position, broker_result):
        # identity caching failed: answer with an empty record for this position
        if _Debug:
            lg.args(_DebugLevel, position=position, err=err)
        broker_result.callback({
            'timestamp': None,
            'revision': 0,
            'customer_idurl': customer_idurl,
            'broker_idurl': None,
            'position': position,
        })
        return None

    def _do_broker_identity_cache(dht_record, position, broker_result):
        # refresh the broker identity, then fire this position's Deferred
        one_broker_task = identitycache.GetLatest(id_url.to_bin(dht_record['broker_idurl']))
        one_broker_task.addCallback(lambda xmlsrc: broker_result.callback(dht_record))
        one_broker_task.addErrback(_on_borker_identity_cache_failed, position, broker_result)
        return None

    def _do_verify(dht_value, position, broker_result):
        # validate one broker DHT record before accepting it
        ret = {
            'timestamp': None,
            'revision': 0,
            'customer_idurl': customer_idurl,
            'broker_idurl': None,
            'position': position,
        }
        if not dht_value or not isinstance(dht_value, dict):
            if _Debug:
                lg.args(_DebugLevel, c=customer_idurl, p=position, dht_value=type(dht_value))
            broker_result.callback(ret)
            return ret
        try:
            if as_fields:
                _customer_idurl = id_url.field(dht_value['customer_idurl'])
                _broker_idurl = id_url.field(dht_value['broker_idurl'])
            else:
                _customer_idurl = id_url.to_bin(dht_value['customer_idurl'])
                _broker_idurl = id_url.to_bin(dht_value['broker_idurl'])
            _position = int(dht_value['position'])
            _revision = int(dht_value.get('revision'))
            _timestamp = int(dht_value.get('timestamp'))
        except:
            lg.exc()
            broker_result.callback(ret)
            return ret
        if _Debug:
            lg.args(_DebugLevel, p=position, b=_broker_idurl, r=_revision)
        if as_fields and _customer_idurl != customer_idurl:
            lg.err('wrong customer idurl %r in message broker DHT record for %r at position %d' % (
                _customer_idurl, customer_idurl, position))
            broker_result.callback(ret)
            return ret
        if position != _position:
            lg.err('wrong position value %d in message broker DHT record for %r at position %d' % (
                _position, customer_idurl, position))
            broker_result.callback(ret)
            return ret
        ret.update({
            'customer_idurl': _customer_idurl,
            'broker_idurl': _broker_idurl,
            'position': _position,
            'revision': _revision,
            'timestamp': _timestamp,
        })
        _do_broker_identity_cache(ret, position, broker_result)
        return None

    def _on_error(err, position, broker_result):
        try:
            msg = err.getErrorMessage()
        except:
            msg = str(err).replace('Exception:', '')
        if _Debug:
            lg.out(_DebugLevel, 'dht_relations.read_customer_message_brokers ERROR %r at position %d failed with %r' % (
                customer_idurl, position, msg, ))
        broker_result.errback(err)
        return None

    def _do_collect_results(all_results):
        # keep only successful, non-empty records and sort them by position
        final_result = []
        for one_success, one_result in all_results:
            if one_success and one_result['broker_idurl']:
                final_result.append(one_result)
        final_result.sort(key=lambda i: i.get('position'))
        if _Debug:
            lg.args(_DebugLevel, results=len(final_result))
        result.callback(final_result)
        return None

    def _do_read_brokers():
        # start one DHT read per requested position and join the answers
        all_brokers_results = []
        for position in positions:
            one_broker_result = Deferred()
            all_brokers_results.append(one_broker_result)
            d = dht_records.get_message_broker(
                customer_idurl=customer_idurl,
                position=position,
                return_details=return_details,
                use_cache=use_cache,
            )
            d.addCallback(_do_verify, position, one_broker_result)
            if _Debug:
                d.addErrback(
                    lg.errback,
                    debug=_Debug,
                    debug_level=_DebugLevel,
                    method='read_customer_message_brokers._do_read_brokers')
            d.addErrback(_on_error, position, one_broker_result)
        join_all_brokers = DeferredList(all_brokers_results, consumeErrors=False)
        join_all_brokers.addCallback(_do_collect_results)
        if _Debug:
            join_all_brokers.addErrback(
                lg.errback,
                debug=_Debug,
                debug_level=_DebugLevel,
                method='read_customer_message_brokers._do_read_brokers')
        join_all_brokers.addErrback(result.errback)
        return None

    d = identitycache.GetLatest(customer_idurl)
    d.addCallback(lambda _: _do_read_brokers())
    if _Debug:
        d.addErrback(lg.errback, debug=_Debug, debug_level=_DebugLevel, method='read_customer_message_brokers')
    d.addErrback(result.errback)
    return result