def _do_retry_one_time(self, fail_info):
    """
    Handle a single delivery failure of a routed packet.

    Only a 'route already closed' error is retried, and at most once:
    the first such failure re-caches the remote identity and retries the
    send; a second failure (or any other error) gives up, fires the
    corresponding automat event and cancels related outbox packets.
    """
    to_idurl = id_url.field(fail_info['to']).to_bin()
    from_idurl = id_url.field(fail_info['from']).to_bin()
    # retries are tracked per (command, packet_id, from, to) combination
    _key = (fail_info['command'], fail_info['packet_id'], from_idurl, to_idurl)
    current_retries = self.packets_retries.get(_key, 0)
    if _Debug:
        lg.args(_DebugLevel, key=_key, retries=current_retries)
    if fail_info.get('error') != 'route already closed':
        # any other error is considered fatal: clean up and stop retrying
        if _Debug:
            lg.dbg(_DebugLevel, 'failed sending routed packet : %r' % fail_info)
        self._do_clean_sent_packet(fail_info)
        self._do_cancel_outbox_packets(fail_info)
        self.packets_retries.pop(_key, None)
        return
    if current_retries >= 1:
        # already retried once before: report failure and clean up
        if _Debug:
            lg.dbg(_DebugLevel, 'failed sending routed packet after few attempts : %r' % fail_info)
        self.automat('retry-failed', fail_info)
        self._do_clean_sent_packet(fail_info)
        self._do_cancel_outbox_packets(fail_info)
        self.packets_retries.pop(_key, None)
        return
    # first failure: refresh the cached identity of the target, then retry
    self.packets_retries[_key] = current_retries + 1
    d = identitycache.immediatelyCaching(fail_info['to'])
    d.addCallback(self._on_cache_retry_success, fail_info)
    d.addErrback(self._on_cache_retry_failed, fail_info)
def _on_extract_done(self, retcode, backupID, source_filename, output_location, backup_index):
    """
    Callback fired when one archive backup was extracted to disk.

    Reads every snapshot file found in *output_location*, collects the
    archive messages that fall inside the [start_sequence_id,
    end_sequence_id] range into self.extracted_messages, then continues
    with the next backup. Returns *retcode* unchanged so this method can
    sit in a Deferred callback chain.
    """
    # the temporary source file is not needed anymore
    tmpfile.throw_out(source_filename, 'file extracted')
    for snapshot_filename in os.listdir(output_location):
        snapshot_path = os.path.join(output_location, snapshot_filename)
        snapshot_data = serialization.BytesToDict(local_fs.ReadBinaryFile(snapshot_path), values_to_text=True)
        for archive_message in snapshot_data.get('items', []):
            # skip messages outside of the requested sequence-id window
            if self.start_sequence_id is not None:
                if self.start_sequence_id > archive_message['sequence_id']:
                    continue
            if self.end_sequence_id is not None:
                if self.end_sequence_id < archive_message['sequence_id']:
                    continue
            self.extracted_messages.append(archive_message)
    if _Debug:
        lg.dbg(_DebugLevel, 'archive snapshot %r extracted successfully to %r, extracted %d archive messages so far' % (
            source_filename,
            output_location,
            len(self.extracted_messages),
        ))
    self._do_restore_next_backup(backup_index + 1)
    return retcode
def _on_identity_cached(self, idurl, node):
    """
    Callback fired when the identity file of a discovered node was cached.

    Rejects nodes from the ignore list, remembers the new idurl in the
    discovered/known collections and reports success for that node.
    Returns the cached idurl as bytes, or None when the node was rejected.
    """
    if self.stopped:
        return None
    if not idurl:
        self._on_node_process_failed(None, node)
        return None
    if id_url.is_in(idurl, self.ignore_idurls):
        if _Debug:
            lg.dbg(_DebugLevel, 'lookup.DiscoveryTask[%r]._on_identity_cached IGNORE %r' % (self.id, idurl))
        self._on_node_process_failed(None, node)
        return None
    self.cached_count += 1
    idurl = id_url.to_bin(idurl)
    if idurl not in discovered_idurls(layer_id=self.layer_id):
        discovered_idurls(layer_id=self.layer_id).append(idurl)
    # remember when this idurl was seen for the last time
    known_idurls()[idurl] = time.time()
    self._on_node_succeed(node, idurl)
    if _Debug:
        lg.out(_DebugLevel, 'lookup.DiscoveryTask[%r]._on_identity_cached : %s' % (self.id, idurl))
    return idurl
def OnFileSendingFinished(self, pkt_out, item, status, size, error_message):
    """
    Callback fired when an outgoing packet towards this supplier finished.

    Depending on the packet command (Retrieve vs Data) and the resulting
    *status*, forwards a success or failure event to the matching
    file-download / file-upload state machine from the request/send queues.
    """
    if self.shutdown:
        # finishing too late, supplier queue is already stopping
        lg.warn('skip, supplier queue is shutting down')
        return
    if not pkt_out.outpacket:
        lg.warn('skip, outpacket is already None')
        return
    packetID = global_id.CanonicalID(pkt_out.outpacket.PacketID)
    if status == 'finished':
        if pkt_out.outpacket.Command == commands.Retrieve():
            if packetID in self.fileRequestQueue:
                f_down = self.fileRequestDict[packetID]
                if _Debug:
                    lg.args(_DebugLevel, obj=f_down, status=status, packetID=packetID, event='retrieve-sent')
                f_down.event('retrieve-sent', pkt_out.outpacket)
        elif pkt_out.outpacket.Command == commands.Data():
            if packetID in self.fileSendQueue:
                f_up = self.fileSendDict[packetID]
                if _Debug:
                    lg.args(_DebugLevel, obj=f_up, status=status, packetID=packetID, event='data-sent')
                f_up.event('data-sent', pkt_out.outpacket)
    else:
        # sending failed: notify the corresponding state machine
        if pkt_out.outpacket.Command == commands.Retrieve():
            if packetID in self.fileRequestQueue:
                if _Debug:
                    lg.dbg(_DebugLevel, 'packet %r is %r during downloading from %s' % (packetID, status, self.remoteID))
                f_down = self.fileRequestDict[packetID]
                f_down.event('request-failed')
        elif pkt_out.outpacket.Command == commands.Data():
            if packetID in self.fileSendQueue:
                if _Debug:
                    lg.dbg(_DebugLevel, 'packet %r is %r during uploading to %s' % (packetID, status, self.remoteID))
                f_up = self.fileSendDict[packetID]
                f_up.event('sending-failed')
def decrypt(self, decrypt_session_func=None):
    """
    Decrypt the encrypted body of this message and return the plain data.

    When *decrypt_session_func* is not supplied, a suitable decryptor is
    selected automatically: first a locally registered key matching the
    recipient, otherwise my own "master" key when the message is addressed
    directly to me. Raises when no usable key can be found.
    """
    if _Debug:
        lg.args(_DebugLevel, decrypt_session_func=decrypt_session_func, recipient=self.recipient)
    session_decryptor = decrypt_session_func
    if not session_decryptor and my_keys.is_key_registered(self.recipient):
        # a key with exactly this ID is registered locally
        if _Debug:
            lg.dbg(_DebugLevel, 'decrypt with registered key %r' % self.recipient)
        session_decryptor = lambda inp: my_keys.decrypt(self.recipient, inp)
    if not session_decryptor:
        glob_id = global_id.ParseGlobalID(self.recipient)
        if glob_id['idurl'] == my_id.getIDURL() and glob_id['key_alias'] == 'master':
            # message addressed directly to me: fall back to my master key
            if _Debug:
                lg.dbg(_DebugLevel, 'decrypt with my master key %r' % self.recipient)
            session_decryptor = lambda inp: my_keys.decrypt('master', inp)
    if not session_decryptor:
        raise Exception('can not find key for given recipient: %s' % self.recipient)
    decrypted_sessionkey = session_decryptor(self.encrypted_session)
    return key.DecryptWithSessionKey(decrypted_sessionkey, self.encrypted_body, session_key_type=key.SessionKeyType())
def on_delete_file(newpacket):
    """
    Process an incoming DeleteFile() request from a customer.

    Erases the files/folders listed in the packet payload (or the single
    path taken from the PacketID when the payload is empty) and replies
    with Ack() on success or Fail() on a bad path / unknown customer.
    Returns True on success, False when the request was rejected.
    """
    # TODO: call verify_packet_ownership()
    if not newpacket.Payload:
        # no payload: the PacketID itself names the single item to delete
        ids = [
            newpacket.PacketID,
        ]
    else:
        ids = strng.to_text(newpacket.Payload).split('\n')
    filescount = 0
    dirscount = 0
    lg.warn('going to erase files: %s' % ids)
    customer_id = global_id.UrlToGlobalID(newpacket.OwnerID)
    for pcktID in ids:
        glob_path = global_id.ParseGlobalID(pcktID)
        if not glob_path['customer']:
            # no customer part in the ID: assume it belongs to the packet owner
            glob_path = global_id.ParseGlobalID(customer_id + ':' + pcktID)
        if not glob_path['path']:
            lg.err("got incorrect PacketID")
            p2p_service.SendFail(newpacket, 'incorrect path')
            return False
        if customer_id != glob_path['customer']:
            lg.warn('trying to delete file stored for another cusomer')
            continue
        # TODO: add validation of customerGlobID
        # TODO: process requests from another customer
        filename = make_valid_filename(newpacket.OwnerID, glob_path)
        if not filename:
            lg.warn("got empty filename, bad customer or wrong packetID?")
            p2p_service.SendFail(newpacket, 'not a customer, or file not found')
            return False
        if os.path.isfile(filename):
            try:
                os.remove(filename)
                filescount += 1
            except:
                lg.exc()
        elif os.path.isdir(filename):
            try:
                bpio._dir_remove(filename)
                dirscount += 1
            except:
                lg.exc()
        else:
            lg.warn("path not found %s" % filename)
        # if self.publish_event_supplier_file_modified:
        #     events.send('supplier-file-modified', data=dict(
        #         action='delete',
        #         glob_path=glob_path['path'],
        #         owner_id=newpacket.OwnerID,
        #     ))
    if _Debug:
        lg.dbg(_DebugLevel, "from [%s] with %d IDs, %d files and %d folders were removed" % (newpacket.OwnerID, len(ids), filescount, dirscount))
    p2p_service.SendAck(newpacket)
    return True
def ping(idurl, channel=None, ack_timeout=15, ping_retries=0, keep_alive=False):
    """
    Doing handshake with remote node only if it is currently not connected.
    Returns Deferred object.
    """
    idurl = strng.to_bin(idurl)
    if _Debug:
        lg.args(_DebugLevel, idurl=idurl, keep_alive=keep_alive, channel=channel)
    result = Deferred()
    result.addErrback(on_ping_failed, idurl=idurl, channel=channel)
    if id_url.is_empty(idurl):
        result.errback(Exception('empty idurl provided'))
        return result
    if not id_url.is_cached(idurl):
        # remote identity is not known yet: run a full handshake right away
        if _Debug:
            lg.dbg(_DebugLevel, 'user identity %r not cached yet, executing clean handshake' % idurl)
        return handshaker.ping(
            idurl=idurl,
            ack_timeout=ack_timeout,
            ping_retries=ping_retries,
            channel=channel or 'clean_ping',
            keep_alive=keep_alive,
        )
    idurl = id_url.field(idurl)
    if not isKnown(idurl):
        if not check_create(idurl, keep_alive=keep_alive):
            raise Exception('can not create instance')
    # let the per-node state machine decide if an actual ping is needed
    A(idurl, 'ping-now', result, channel=channel, ack_timeout=ack_timeout, ping_retries=ping_retries, original_idurl=idurl.to_original())
    return result
def _do_clean_sent_packet(self, info):
    """
    Drop from self.sent_packets every entry whose outgoing packet matches
    the command, packet ID and destination described in *info*.

    Entries holding an empty outpacket are dropped as well.
    """
    target_idurl = id_url.to_bin(info['to'])
    matched_keys = []
    for entry_key in self.sent_packets.keys():
        routed_packet, outpacket = self.sent_packets.get(entry_key, (
            None,
            None,
        ))
        if not outpacket:
            # stale entry without an actual packet, schedule for removal
            if _Debug:
                lg.dbg(_DebugLevel, 'found empty outpacket : %r' % routed_packet)
            matched_keys.append(entry_key)
            continue
        same_command = outpacket.Command == info['command']
        same_packet_id = outpacket.PacketID == info['packet_id']
        same_target = outpacket.RemoteID.to_bin() == target_idurl
        if same_command and same_packet_id and same_target:
            matched_keys.append(entry_key)
    for entry_key in matched_keys:
        self.sent_packets.pop(entry_key, (
            None,
            None,
        ))
def handshake(idurl, channel=None, ack_timeout=15, ping_retries=2, keep_alive=False):
    """
    Immediately doing handshake with remote node by fetching remote identity file and then
    sending my own Identity() to remote peer and wait for an Ack() packet.
    Returns Deferred object.
    """
    idurl = strng.to_bin(idurl)
    if _Debug:
        lg.args(_DebugLevel, idurl=idurl, keep_alive=keep_alive, channel=channel, ack_timeout=ack_timeout, ping_retries=ping_retries)
    result = Deferred()
    result.addErrback(on_ping_failed, idurl=idurl, channel=channel)
    if id_url.is_empty(idurl):
        result.errback(Exception('empty idurl provided'))
        return result
    if not id_url.is_cached(idurl):
        # remote identity not cached yet: run a clean handshake directly
        if _Debug:
            lg.dbg(_DebugLevel, 'user identity %r not cached yet, executing clean handshake' % idurl)
        return handshaker.ping(
            idurl=idurl,
            ack_timeout=ack_timeout,
            ping_retries=ping_retries,
            channel=channel or 'clean_handshake',
            keep_alive=keep_alive,
        )
    idurl = id_url.field(idurl)
    if not isKnown(idurl):
        if not check_create(idurl, keep_alive=keep_alive):
            raise Exception('can not create instance')
    # force the per-node state machine to execute the handshake now
    A(idurl, 'handshake', result, channel=channel, ack_timeout=ack_timeout, ping_retries=ping_retries, original_idurl=idurl.to_original())
    return result
def state_changed(self, oldstate, newstate, event, *args, **kwargs):
    """
    Method to catch the moment when index_synchronizer() state were changed.

    Emits 'my-backup-index-synchronized' when the index becomes in sync
    (unless it already synced less than 30 seconds ago) and
    'my-backup-index-out-of-sync' when a request/send attempt ends with
    no info.
    """
    if newstate == 'IN_SYNC!':
        # NOTE(review): reads A().last_time_in_sync but writes
        # self.last_time_in_sync — presumably self IS the A() singleton; confirm
        if A().last_time_in_sync > 0 and time.time() - A().last_time_in_sync < 30:
            if _Debug:
                lg.dbg(_DebugLevel, 'backup index already synchronized %r seconds ago' % (time.time() - A().last_time_in_sync))
        else:
            if _Debug:
                lg.dbg(_DebugLevel, 'backup index just synchronized, sending "my-backup-index-synchronized" event')
            events.send('my-backup-index-synchronized', data={})
        self.last_time_in_sync = time.time()
        if self.PushAgain:
            reactor.callLater(0, self.automat, 'instant')  # @UndefinedVariable
    if newstate == 'NO_INFO' and oldstate in [
        'REQUEST?',
        'SENDING',
    ]:
        events.send('my-backup-index-out-of-sync', data={})
    if newstate == 'NO_INFO':
        # mark sync time as unknown
        self.last_time_in_sync = -1
def _on_list_files_failed(self, response, info, customer_idurl, supplier_idurl, key_id):
    """
    Errback fired when a ListFiles() request to a supplier failed.

    When the supplier reports the public key as unknown, transfer the key
    to him (public part only) and continue on success; any other failure
    is just logged. Always returns None.
    """
    if strng.to_text(response.Payload) == 'key not registered':
        if _Debug:
            lg.dbg(_DebugLevel, 'supplier %r of customer %r do not possess public key %r yet, sending it now' % (
                supplier_idurl,
                customer_idurl,
                key_id,
            ))
        result = key_ring.transfer_key(key_id, supplier_idurl, include_private=False, include_signature=False)
        result.addCallback(lambda r: self._on_key_transfer_success(customer_idurl, supplier_idurl, key_id))
        result.addErrback(lambda err: lg.err('failed sending key %r : %r' % (
            key_id,
            err,
        )))
    else:
        lg.err('failed requesting ListFiles() with %r for customer %r from supplier %r' % (
            key_id,
            customer_idurl,
            supplier_idurl,
        ))
    return None
def OutboxStatus(pkt_out, status, error=''):
    """
    This method is called when raised a status report after sending a packet
    to remote peer. If packet sending was failed - user seems to be OFFLINE.

    Always returns False so other status handlers keep running.
    """
    global _ShutdownFlag
    if _ShutdownFlag:
        return False
    # only track packets I created myself and which are not addressed to me
    if pkt_out.outpacket.RemoteID.to_bin() == my_id.getIDURL().to_bin():
        return False
    if pkt_out.outpacket.CreatorID.to_bin() != my_id.getIDURL().to_bin():
        return False
    if status == 'finished':
        if error == 'unanswered' and pkt_out.outpacket.Command == commands.Identity():
            if pkt_out.outpacket.OwnerID == my_id.getIDURL() and pkt_out.outpacket.CreatorID == my_id.getIDURL():
                # if not handshaker.is_running(pkt_out.outpacket.RemoteID):
                if _Debug:
                    lg.dbg(_DebugLevel, 'ping packet %s addressed to %r was "unanswered"' % (pkt_out, pkt_out.outpacket.RemoteID, ))
    else:
        if _Debug:
            lg.dbg(_DebugLevel, 'packet %s is "%s" with %s error: %r' % (pkt_out, status, pkt_out.outpacket, error))
        if pkt_out.outpacket.Command == commands.Identity():
            if pkt_out.outpacket.OwnerID == my_id.getIDURL() and pkt_out.outpacket.CreatorID == my_id.getIDURL():
                if handshaker.is_running(pkt_out.outpacket.RemoteID):
                    # let the running handshake know its Identity() packet failed
                    handshaker.on_identity_packet_outbox_status(pkt_out, status, error)
    return False
def _stun_port_received(self, result, node):
    """
    Callback fired when a DHT node responded to the 'stun_port' request.

    Validates the response, extracts the port number and the node address
    and, for a usable (non-zero) port, fires the 'port-number-received'
    event on the state machine. Malformed responses are logged and dropped.
    """
    if _Debug:
        lg.out(_DebugLevel, 'stun_client._stun_port_received %r from %s node_id=%r' % (
            result,
            node,
            node.id,
        ))
    # this request is answered now, forget its pending Deferred
    self.deferreds.pop(node.id, None)
    if not isinstance(result, dict):
        if _Debug:
            # BUGFIX: previously called lg.dbg(<message>) without the debug-level
            # first argument that every other lg.dbg() call passes, so the
            # message string landed in the level slot
            lg.dbg(_DebugLevel, 'empty result received from node %r : %r' % (
                node,
                result,
            ))
        return
    try:
        port = int(strng.to_text(result['stun_port']))
        address = node.address
    except:
        lg.exc()
        return
    if port == 0:
        # a zero port is not usable as a STUN server
        return
    if _Debug:
        lg.out(_DebugLevel, '        new stun port server found %s:%s' % (
            address,
            port,
        ))
    self.automat('port-number-received', (address, port))
def on_files_received(newpacket, info):
    """
    Process an incoming Files() packet from one of my suppliers.

    Verifies that the packet is addressed to me and really comes from one
    of my suppliers, then hands it over to IncomingSupplierListFiles() and
    replies with Ack() or Fail(). Returns True when accepted.
    """
    list_files_global_id = global_id.ParseGlobalID(newpacket.PacketID)
    if not list_files_global_id['idurl']:
        lg.warn('invalid PacketID: %s' % newpacket.PacketID)
        return False
    if list_files_global_id['idurl'] != my_id.getLocalID():
        # ignore Files() if this is another customer
        if _Debug:
            lg.dbg(_DebugLevel, 'ignore incoming %r which is owned by another customer' % newpacket)
        return False
    if not contactsdb.is_supplier(newpacket.OwnerID):
        # ignore Files() if this is not my supplier
        if _Debug:
            lg.dbg(_DebugLevel, 'incoming %r received, but %r is not my supplier' % (
                newpacket,
                newpacket.OwnerID,
            ))
        return False
    if _Debug:
        lg.args(_DebugLevel, "service_backups._on_inbox_packet_received: %r for us from %s at %s" % (newpacket, newpacket.CreatorID, info))
    if IncomingSupplierListFiles(newpacket, list_files_global_id):
        p2p_service.SendAck(newpacket)
    else:
        p2p_service.SendFail(newpacket)
    return True
def _do_id_server_health_check(self):
    """
    Verify that my current identity server is alive and still serves my
    own identity file; when the check fails, set NeedPropagate and kick
    the state machine with 'check-synchronize'.

    The check is skipped when the identity was cached recently enough
    (within the configured health-check interval).
    """
    my_idurl = my_id.getLocalIdentity().getIDURL(as_original=True)
    if _Debug:
        lg.args(_DebugLevel, my_idurl=my_idurl)

    def _verify(xmlsrc=None):
        # called with the fetched identity XML on success, or None on failure
        if not xmlsrc:
            lg.err('my current identity server not healthy')
            self.NeedPropagate = True
            self.automat('check-synchronize')
            return
        remote_ident = identity.identity(xmlsrc=xmlsrc)
        if not remote_ident.isCorrect() or not remote_ident.Valid():
            lg.warn('my current identity server responded with bad identity file')
            self.NeedPropagate = True
            self.automat('check-synchronize')
            return
        if remote_ident.getIDURL(as_original=True) != my_idurl:
            lg.warn('my current identity server responded with unknown identity')
            self.NeedPropagate = True
            self.automat('check-synchronize')
            return
        if _Debug:
            lg.dbg(_DebugLevel, 'my current identity server is healthy')

    last_time = identitycache.last_time_cached(my_idurl)
    if last_time and time.time() - last_time < config.conf().getInt('services/identity-propagate/health-check-interval-seconds'):
        # checked recently enough, skip this round
        if _Debug:
            lg.dbg(_DebugLevel, 'skip health check of my current identity server, last time cached %f seconds ago' % (time.time() - last_time))
        return
    d = identitycache.immediatelyCaching(my_idurl, try_other_sources=False)
    d.addCallback(_verify)
    # on caching failure run _verify() with no XML to trigger re-propagation
    d.addErrback(lambda _: _verify() and None)
def do_notify(callback_method, consumer_id, queue_id, message_id):
    """
    Deliver one queued message to a single consumer.

    *callback_method* is either a remote idurl — then an Event() packet is
    sent over the network — or a local callable invoked with the message
    fields. Returns a Deferred firing True/False with the delivery result,
    or False immediately when a notification for this consumer is already
    in progress.
    """
    existing_message = queue(queue_id)[message_id]
    event_id = global_id.ParseGlobalQueueID(queue_id)['queue_alias']
    if consumer_id in existing_message.notifications:
        if _Debug:
            lg.dbg(_DebugLevel, 'notification %r already started for consumer %r' % (
                message_id,
                consumer_id,
            ))
        # notification already sent to given consumer
        return False
    ret = Deferred()
    start_notification(consumer_id, queue_id, message_id, ret)
    if id_url.is_idurl(callback_method):
        # remote consumer: deliver the message as an Event() packet
        p2p_service.SendEvent(
            remote_idurl=id_url.field(callback_method),
            event_id=event_id,
            payload=existing_message.payload,
            producer_id=existing_message.producer_id,
            consumer_id=consumer_id,
            queue_id=queue_id,
            message_id=existing_message.message_id,
            created=existing_message.created,
            response_timeout=15,
            callbacks={
                commands.Ack(): lambda response, info: ret.callback(True),
                commands.Fail(): lambda response, info: ret.callback(False),
                None: lambda pkt_out: ret.callback(False),
            },
        )
    else:
        # local consumer: call the method directly with the message fields
        try:
            result = callback_method(
                dict(
                    event_id=event_id,
                    payload=existing_message.payload,
                    producer_id=existing_message.producer_id,
                    consumer_id=consumer_id,
                    queue_id=queue_id,
                    message_id=existing_message.message_id,
                    created=existing_message.created,
                ))
        except:
            lg.exc()
            result = False
        if isinstance(result, Deferred):
            result.addCallback(lambda ok: ret.callback(True) if ok else ret.callback(False))
            if _Debug:
                result.addErrback(lg.errback, debug=_Debug, debug_level=_DebugLevel, method='p2p_queue.do_notify')
            result.addErrback(lambda err: ret.callback(False))
        else:
            reactor.callLater(0, ret.callback, result)  # @UndefinedVariable
    return ret
def AddSupplierToHire(idurl):
    """
    Append *idurl* to the end of the list of known supplier candidates.

    Duplicates are silently ignored.
    """
    global _SuppliersToHire
    candidate = strng.to_bin(idurl)
    if candidate in _SuppliersToHire:
        return
    _SuppliersToHire.append(candidate)
    if _Debug:
        lg.dbg(_DebugLevel, 'added %r as a supplier candidate' % candidate)
def shutdown():
    """
    Flush and close the chat history database.

    Safe to call more than once or before init: both module-level globals
    are reset to None afterwards and a repeated call becomes a no-op,
    instead of crashing on a missing or already closed connection.
    """
    global _HistoryDB
    global _HistoryCursor
    if _HistoryDB is not None:
        _HistoryDB.commit()
        _HistoryDB.close()
    # drop references so a repeated shutdown() does not touch a closed handle
    _HistoryDB = None
    _HistoryCursor = None
    if _Debug:
        lg.dbg(_DebugLevel, '')
def _init_local(self):
    """
    Initialize all local (non-networking) modules in the required order:
    identity database, my own identity, settings-driven helpers, crypto
    keys, plus optional web-stream / web-traffic logging and memory
    profiling when enabled in settings.
    """
    from p2p import commands
    from lib import net_misc
    from lib import misc
    from system import tmpfile
    from system import run_upnpc
    from raid import eccmap
    from contacts import identitydb
    from crypt import my_keys
    from userid import id_url
    from userid import my_id
    id_url.init()
    identitydb.init()
    my_id.init()
    if settings.enableWebStream():
        # optional: stream log output to a web page
        try:
            from logs import weblog
            weblog.init(settings.getWebStreamPort())
        except:
            lg.exc()
    if settings.enableWebTraffic():
        # optional: expose packet traffic on a web page
        try:
            from logs import webtraffic
            webtraffic.init(port=settings.getWebTrafficPort())
        except:
            lg.exc()
    misc.init()
    commands.init()
    tmpfile.init(settings.getTempDir())
    net_misc.init()
    settings.update_proxy_settings()
    run_upnpc.init()
    eccmap.init()
    my_keys.init()
    # if sys.argv.count('--twisted'):
    #     from twisted.python import log as twisted_log
    #     twisted_log.startLogging(MyTwistedOutputLog(), setStdout=0)
    #     import twisted.python.failure as twisted_failure
    #     twisted_failure.startDebugMode()
    #     twisted_log.defaultObserver.stop()
    # if settings.getDebugLevel() > 10:
    #     defer.setDebugging(True)
    if settings.enableMemoryProfile():
        # optional: dump heap statistics via guppy when available
        try:
            from guppy import hpy  # @UnresolvedImport
            hp = hpy()
            hp.setrelheap()
            if _Debug:
                lg.out(_DebugLevel, 'hp.heap():\n'+str(hp.heap()))
                lg.out(_DebugLevel, 'hp.heap().byrcs:\n'+str(hp.heap().byrcs))
                lg.out(_DebugLevel, 'hp.heap().byvia:\n'+str(hp.heap().byvia))
        except:
            if _Debug:
                lg.out(_DebugLevel, "guppy package is not installed")
    if _Debug:
        lg.dbg(_DebugLevel, 'all local modules are initialized, ready to start the engine')
def InsertSupplierToHire(idurl):
    """
    Put *idurl* at the very beginning of the list of supplier candidates,
    so it will be tried first.

    Duplicates are silently ignored.
    """
    global _SuppliersToHire
    candidate = strng.to_bin(idurl)
    if candidate in _SuppliersToHire:
        return
    _SuppliersToHire.insert(0, candidate)
    if _Debug:
        lg.dbg(_DebugLevel, 'added %r as a FIRST supplier candidate' % candidate)
def dataReceived(self, data):
    """
    Called by the transport when raw bytes arrive over the web socket.

    Parses the bytes as a JSON dictionary and hands it to
    do_process_incoming_message(); parse and processing failures are
    logged and otherwise ignored.
    """
    try:
        incoming = serialization.BytesToDict(data, keys_to_text=True, values_to_text=True)
    except:
        lg.exc()
        return
    if _Debug:
        lg.dbg(_DebugLevel, 'received %d bytes from web socket: %r' % (len(data), incoming))
    if do_process_incoming_message(incoming):
        return
    lg.warn('failed processing incoming message from web socket: %r' % incoming)
def doCancelPackets(self, *args, **kwargs):
    """
    Action method.

    Cancel every pending outgoing Retrieve() packet with the current
    packet ID, because the download was cancelled by the user.
    """
    for pkt_out in packet_out.search_by_packet_id(self.packetID):
        if pkt_out.outpacket.Command != commands.Retrieve():
            continue
        if _Debug:
            lg.dbg(_DebugLevel, 'sending "cancel" to %s addressed to %s because downloading cancelled' % (
                pkt_out,
                pkt_out.remote_idurl,
            ))
        pkt_out.automat('cancel')
def push(json_data):
    """
    Serialize *json_data* and write it to the currently connected web socket.

    Returns True when the payload was written, False when there is no
    active web socket transport.
    """
    global _WebSocketTransport
    if not _WebSocketTransport:
        return False
    raw_bytes = serialization.DictToBytes(json_data)
    _WebSocketTransport.write(raw_bytes)
    if _Debug:
        lg.dbg(_DebugLevel, 'sent %d bytes to web socket: %r' % (len(raw_bytes), json_data))
    if _Debug:
        # BUGFIX: previously logged len(json_data) — the number of dict keys —
        # while claiming "bytes"; use the real serialized payload size
        lg.out(0, '*** API WS PUSH %d bytes : %r' % (len(raw_bytes), json_data, ))
    if _APILogFileEnabled:
        lg.out(0, '*** WS PUSH %d bytes : %r' % (len(raw_bytes), json_data, ), log_name='api', showtime=True)
    return True
def sign_key(key_id, keys_folder=None, ignore_shared_keys=False, save=True):
    """
    Generate my signature for the given registered key and store it.

    Refuses to overwrite a signature made by another owner (a shared key)
    unless *ignore_shared_keys* is set, in which case signing is silently
    skipped. Returns the signed key object, True when a shared key was
    skipped, or False when the key is not registered at all.
    """
    key_id = latest_key_id(strng.to_text(key_id))
    if not is_key_registered(key_id):
        lg.warn('key %s is not found' % key_id)
        return False
    if not keys_folder:
        keys_folder = settings.KeyStoreDir()
    key_object = known_keys()[key_id]
    if key_object.signed:
        if key_object.signed[1] != key.MyPublicKey():
            # the existing signature was made with someone else's key
            if ignore_shared_keys:
                if _Debug:
                    lg.dbg(_DebugLevel, 'skip generating signature for shared key: %r' % key_id)
                return True
            raise Exception('must not generate and overwrite existing signature for shared key: %r' % key_id)
    signed_key_info = make_key_info(
        key_object=key_object,
        key_id=key_id,
        include_private=not key_object.isPublic(),
        generate_signature=True,
    )
    key_object.signed = (
        signed_key_info['signature'],
        signed_key_info['signature_pubkey'],
    )
    known_keys()[key_id] = key_object
    if save:
        save_key(key_id, keys_folder=keys_folder)
    events.send('key-signed', data=dict(
        key_id=key_id,
        label=key_object.label,
        key_size=key_object.size(),
    ))
    listeners.push_snapshot('key', snap_id=key_id, data=make_key_info(
        key_object=key_object,
        key_id=key_id,
        event='key-signed',
        include_private=False,
        include_local_id=True,
        include_signature=True,
        include_label=True,
    ))
    return key_object
def IncomingListFiles(newpacket):
    """
    Called from ``p2p.backup_control`` to pass incoming "ListFiles" packet here.

    Packets that were not requested (their PacketID is unknown) are only
    logged and dropped.
    """
    if not A():
        return
    if newpacket.PacketID not in A().requested_lf_packet_ids:
        # not a packet we asked for — ignore it
        if _Debug:
            lg.dbg(_DebugLevel, 'received and ignored %r, currently target customer is %r' % (newpacket, A().target_customer_idurl, ))
        return
    A().received_lf_counter += 1
    A().requested_lf_packet_ids.discard(newpacket.PacketID)
    A('inbox-files', newpacket)
def doInitServices(self, *args, **kwargs):
    """
    Action method.

    On Android, re-open the intercepted log file first; then initialize
    and start the services driver, firing 'init-services-done' when the
    driver start finishes (successfully or not).
    """
    if bpio.Android():
        lg.close_intercepted_log_file()
        lg.open_intercepted_log_file('/storage/emulated/0/Android/data/org.bitdust_io.bitdust1/files/Documents/.bitdust/logs/android.log', mode='a')
        if _Debug:
            lg.dbg(_DebugLevel, 'log file "android.log" re-opened')
    if _Debug:
        lg.out(_DebugLevel, 'initializer.doInitServices')
    driver.init()
    d = driver.start()
    d.addBoth(lambda x: self.automat('init-services-done'))
def _on_cache_retry_success(self, xmlsrc, fail_info):
    """
    Callback fired when identity of the failed packet's target was re-cached.

    Scans self.sent_packets for entries matching *fail_info* (by command,
    packet ID and destination) and re-sends each matching packet through
    the router, firing 'retry' on success or 'retry-send-failed' otherwise.
    Always returns None.
    """
    if _Debug:
        lg.args(_DebugLevel, sent_packets=len(self.sent_packets), fail_info=fail_info)
    to_idurl = id_url.to_bin(fail_info['to'])
    for _key in self.sent_packets.keys():
        routed_packet, outpacket = self.sent_packets.get(_key, (
            None,
            None,
        ))
        if not outpacket:
            if _Debug:
                lg.dbg(_DebugLevel, 'found empty outpacket : %r' % routed_packet)
            continue
        # match by command, packet ID and destination
        if outpacket.Command != fail_info['command']:
            continue
        if outpacket.PacketID != fail_info['packet_id']:
            continue
        if outpacket.RemoteID.to_bin() != to_idurl:
            continue
        routed_retry_packet = self._do_send_packet_to_router(
            outpacket=outpacket,
            callbacks=routed_packet.callbacks,
            wide=fail_info.get('wide', False),
            keep_alive=fail_info.get('keep_alive', False),
            response_timeout=fail_info.get('response_timeout', None),
            is_retry=True,
        )
        if not routed_retry_packet:
            self.automat('retry-send-failed', fail_info)
        else:
            # replace the old entry with the retried packet
            self.sent_packets[_key] = (
                routed_retry_packet,
                outpacket,
            )
            self.automat('retry', fail_info)
        del routed_packet
    return None
def sign_key_info(key_info):
    """
    Add my signature to the given key info dictionary and return it.

    The signature covers the 'alias', 'public' and 'signature_pubkey'
    fields joined with '-' and hashed; *key_info* is modified in place.
    """
    key_info['signature_pubkey'] = key.MyPublicKey()
    signed_fields = [
        'alias',
        'public',
        'signature_pubkey',
    ]
    hash_text = '-'.join([strng.to_text(key_info[field]) for field in signed_fields])
    if _Debug:
        lg.dbg(_DebugLevel, hash_text)
    hash_bin = key.Hash(strng.to_bin(hash_text))
    key_info['signature'] = strng.to_text(key.Sign(hash_bin))
    return key_info
def _do_cancel_outbox_packets(self, fail_info):
    """
    Cancel all pending outbox packets related to the failed routed packet.

    A packet matches when its command, destination and creator/owner
    agree with the fields of *fail_info*.
    """
    to_idurl = id_url.field(fail_info['to'])
    from_idurl = id_url.field(fail_info['from'])
    for p in packet_out.search_by_packet_id(fail_info['packet_id']):
        if p.outpacket.Command == fail_info['command']:
            if id_url.to_bin(to_idurl) == p.outpacket.RemoteID.to_bin():
                # either the creator or the owner must be the failing sender
                if p.outpacket.CreatorID.to_bin() == id_url.to_bin(from_idurl) or p.outpacket.OwnerID.to_bin() == id_url.to_bin(from_idurl):
                    if _Debug:
                        lg.dbg(_DebugLevel, 'about to cancel %r because sending via proxy transport is failed' % p)
                    p.automat('cancel')
def isServiceAccepted(self, *args, **kwargs):
    """
    Condition method.

    Returns True when the supplier's response payload starts with
    'accepted', otherwise False.
    """
    newpacket = args[0]
    accepted = strng.to_text(newpacket.Payload).startswith('accepted')
    if _Debug:
        if accepted:
            lg.dbg(_DebugLevel, 'supplier %s accepted my request and will be connected' % self.supplier_idurl)
        else:
            lg.dbg(_DebugLevel, 'supplier %s refused my request' % self.supplier_idurl)
    return accepted