def doSaveOptions(self, arg):
    """
    Action method.

    Unpack the (proto, host, options) tuple from ``arg`` and store host and
    options on the instance; warn when the protocol does not match this
    object's own ``self.proto``.
    """
    incoming_proto, self.host, self.options = arg
    if incoming_proto != self.proto:
        lg.warn('wrong protocol')
def _verify_transport(proto):
    """
    Verify that the ``proto`` transport's contact info matches my identity.

    The check is skipped (reported as success) when the transport is disabled,
    OFFLINE, or not yet LISTENING.  Otherwise the transport interface is asked
    to verify my contacts and the answer is normalized into a Deferred.

    :param proto: transport protocol name, e.g. 'tcp' or 'udp'
    :return: a Deferred firing True on success/skip, False on failure
    """
    if _Debug:
        lg.out(_DebugLevel - 8, " verifying %s_transport" % proto)
    if not settings.transportIsEnabled(proto):
        if _Debug:
            lg.out(_DebugLevel - 8, " %s_transport is disabled" % proto)
        return succeed(True)
    transp = transport(proto)
    if transp.state == "OFFLINE":
        if _Debug:
            lg.out(_DebugLevel - 8, " %s_transport state is OFFLINE" % proto)
        return succeed(True)
    if transp.state != "LISTENING":
        if _Debug:
            lg.out(_DebugLevel - 8, " %s_transport state is not LISTENING" % proto)
        return succeed(True)
    transp_result = transp.interface.verify_contacts(my_id_obj)
    if _Debug:
        lg.out(_DebugLevel - 8, " %s result is %r" % (proto, transp_result))
    if isinstance(transp_result, bool):
        # plain boolean answer: wrap into an already-fired Deferred
        # (previously two separate branches with a non-idiomatic `== False` test)
        return succeed(transp_result)
    if isinstance(transp_result, Deferred):
        # asynchronous answer: pass the result through unchanged
        # (`ret.callback` used directly instead of a redundant lambda wrapper)
        ret = Deferred()
        transp_result.addCallback(ret.callback)
        return ret
    lg.warn("incorrect result returned from %s_interface.verify_contacts(): %r" % (proto, transp_result))
    return succeed(False)
def doBroadcastMessage(self, arg):
    """
    Action method.

    Re-broadcast an incoming message: deliver it to every connected
    listener whose scope matches the message owner, fire the local
    incoming-message callback, forward to all connected broadcasters, and
    remember the message id so it is never broadcast twice.
    """
    from broadcast import broadcast_service
    msg, newpacket = arg
    msgid = msg['id']
    if _Debug:
        lg.out(_DebugLevel, 'broadcaster_node.doBroadcastMessage %s' % msgid)
    if msgid in self.messages_sent:
        # duplicate protection: this id was already broadcast by us
        lg.warn('CRITICAL, found same message already broadcasted !!!')
        return
    # if some listeners connected - send to them
    for listener_idurl, scope in self.listeners.items():
        if listener_idurl == newpacket.OwnerID:
            # skip this listener
            continue
        lg.out(4, ' test %s:%s for %s' % (listener_idurl, scope, msg['owner']))
        # but check if they really need that message
        # listener can set a scope, so he will get this broadcasting
        # only if creator of that message is listed in scope
        if not scope or msg['owner'] in scope:
            outpacket = broadcast_service.packet_for_listener(
                listener_idurl, msg)
            p2p_service.SendBroadcastMessage(outpacket)
    # fire broadcast listening callback
    if self.incoming_broadcast_message_callback is not None:
        self.incoming_broadcast_message_callback(msg)
    # relay the message to all other connected broadcaster nodes
    for broadcaster_idurl in self.connected_broadcasters:
        outpacket = broadcast_service.packet_for_broadcaster(
            broadcaster_idurl, msg)
        p2p_service.SendBroadcastMessage(outpacket)
    # record when this message id was sent, so it is not repeated
    self.messages_sent[msgid] = int(time.time())
def _list(params):
    """
    Build a JSON-friendly listing of the backup filesystem under
    ``params['path']``.

    Returns ``{'result': [...]}`` where each entry describes one item
    reported by ``backup_fs.ListByPathAdvanced()``; 'index' items are
    skipped.  On a bad listing an empty result is returned.
    """
    path = params['path']
    if bpio.Linux() or bpio.Mac():
        # normalize to a single leading slash on POSIX systems
        path = '/' + path.lstrip('/')
    lst = backup_fs.ListByPathAdvanced(path)
    if not isinstance(lst, list):
        lg.warn('backup_fs.ListByPathAdvanced returned: %s' % lst)
        return {"result": [], }
    # map the positional tuple fields into named keys, skipping 'index' rows
    result = [{
        "type": item[0],
        "name": item[1],
        "id": item[2],
        "rights": "",
        "size": item[3],
        "date": item[4],
        "dirpath": item[5],
        "has_childs": item[6],
        "content": '1' if item[7] else '',
        "versions": item[8],
    } for item in lst if item[2] != 'index']
    return {'result': result, }
def RemoteFileReport(backupID, blockNum, supplierNum, dataORparity, result):
    """
    Writes info for a single piece of data into "remote" matrix.

    May be called when you got an Ack packet from remote supplier after you
    sent him some Data packet.

    :param backupID: id of the backup this piece belongs to
    :param blockNum: block number within the backup
    :param supplierNum: index of the supplier (0-based position)
    :param dataORparity: either "Data" or "Parity"
    :param result: truthy when the piece was delivered successfully
    """
    blockNum = int(blockNum)
    supplierNum = int(supplierNum)
    # supplierNum is a 0-based index into lists of length num_suppliers(),
    # so any value >= num_suppliers() is out of range (the old ">" check
    # let supplierNum == num_suppliers() through and caused an IndexError below)
    if supplierNum >= contactsdb.num_suppliers():
        lg.out(4, "backup_matrix.RemoteFileReport got too big supplier number, possible this is an old packet")
        return
    if backupID not in remote_files():
        remote_files()[backupID] = {}
        lg.out(8, "backup_matrix.RemoteFileReport new remote entry for %s created in the memory" % backupID)
    if blockNum not in remote_files()[backupID]:
        remote_files()[backupID][blockNum] = {
            "D": [0] * contactsdb.num_suppliers(),
            "P": [0] * contactsdb.num_suppliers(),
        }
    # save backed up block info into remote info structure, synchronize on hand info
    flag = 1 if result else 0
    if dataORparity == "Data":
        remote_files()[backupID][blockNum]["D"][supplierNum] = flag
    elif dataORparity == "Parity":
        remote_files()[backupID][blockNum]["P"][supplierNum] = flag
    else:
        # the bad value here is dataORparity, not the backup ID -
        # the old message wrongly reported "incorrect backup ID"
        lg.warn("incorrect dataORparity: %s" % dataORparity)
    # if we know only 5 blocks stored on remote machine
    # but we have backed up 6th block - remember this
    remote_max_block_numbers()[backupID] = max(remote_max_block_numbers().get(backupID, -1), blockNum)
    # mark to repaint this backup in gui
    RepaintBackup(backupID)
def _on_request_service_fail(self, response, info): if response.PacketID not in self.request_service_packet_id: lg.warn('wong PacketID in response: %s, but outgoing was : %s' % ( response.PacketID, str(self.request_service_packet_id))) else: self.request_service_packet_id.remove(response.PacketID) self.automat('service-refused', (response, info))
def _got_my_address(self, value, key):
    """
    DHT read-back callback: confirm that the value just written to the DHT
    under ``key`` really holds my own UDP address.

    Fires 'dht-write-success' into the state machine when the stored value
    matches ``self.my_address``, 'dht-write-failed' otherwise.
    """
    if not isinstance(value, dict):
        lg.warn('can not read my address')
        self.automat('dht-write-failed')
        return
    try:
        # value maps hash(key) -> "host:port" string; strip trailing newline
        addr = value[dht_service.key_to_hash(key)].strip('\n').strip()
    except:
        if _Debug:
            lg.out(
                4,
                'udp_node._got_my_address ERROR wrong key in response: %r' % value)
        lg.exc()
        self.automat('dht-write-failed')
        return
    # compare against my own (host, port) pair formatted the same way
    if addr != '%s:%d' % (self.my_address[0], self.my_address[1]):
        if _Debug:
            lg.out(
                4,
                'udp_node._got_my_address ERROR value not fit: %r' % value)
        self.automat('dht-write-failed')
        return
    self.automat('dht-write-success')
def doSendRequestService(self, arg):
    """
    Action method.

    Sends a RequestService packet to the proxy router asking to start the
    'proxy_server' service for this node.  The request carries my original
    identity (the one saved before any proxy override) so the router knows
    my real contacts.  At most 3 requests are kept in flight at once.
    """
    if len(self.request_service_packet_id) >= 3:
        # too many outstanding requests - give up and refuse the service
        if _Debug:
            lg.warn('too many service requests to %s' % self.router_idurl)
        self.automat('service-refused', arg)
        return
    service_info = 'service_proxy_server \n'
    # prefer the saved original identity; fall back to the current local one
    orig_identity = config.conf().getData('services/proxy-transport/my-original-identity').strip()
    if not orig_identity:
        orig_identity = my_id.getLocalIdentity().serialize()
    service_info += orig_identity
    # for t in gateway.transports().values():
    # service_info += '%s://%s' % (t.proto, t.host)
    # service_info += ' '
    newpacket = signed.Packet(
        commands.RequestService(),
        my_id.getLocalID(),
        my_id.getLocalID(),
        packetid.UniqueID(),
        service_info,
        self.router_idurl,)
    # register callbacks so Ack/Fail responses drive the state machine
    packet_out.create(newpacket, wide=False, callbacks={
        commands.Ack(): self._on_request_service_ack,
        commands.Fail(): self._on_request_service_fail},)
    self.request_service_packet_id.append(newpacket.PacketID)
def doStartListening(self, arg):
    """
    Action method.

    Remember the router's (proto, host) pair - from the incoming event
    argument when possible, otherwise from the saved configuration - then
    persist the current router, save my original identity if not yet
    stored, and start intercepting inbox packets.
    """
    try:
        _, info = arg
        self.router_proto_host = (info.proto, info.host)
    except:
        # no usable info in arg - fall back to the saved router setting
        try:
            s = config.conf().getString('services/proxy-transport/current-router').strip()
            _, router_proto, router_host = s.split(' ')
            self.router_proto_host = (router_proto, router_host)
        except:
            lg.exc()
    self.router_identity = identitycache.FromCache(self.router_idurl)
    # persist the chosen router so it survives a restart
    config.conf().setString('services/proxy-transport/current-router', '%s %s %s' % (
        self.router_idurl, self.router_proto_host[0], self.router_proto_host[1]))
    if ReadMyOriginalIdentitySource():
        lg.warn('my original identity is not empty')
    else:
        # store my identity before the router starts overriding it
        config.conf().setData('services/proxy-transport/my-original-identity', my_id.getLocalIdentity().serialize())
    self.request_service_packet_id = []
    # highest priority inbox hook: inspect every incoming packet first
    callback.insert_inbox_callback(0, self._on_inbox_packet_received)
    if _Debug:
        lg.out(2, 'proxy_receiver.doStartListening !!!!!!! router: %s at %s://%s' % (
            self.router_idurl, self.router_proto_host[0], self.router_proto_host[1]))
def start():
    """
    Start all enabled services in boot-up order.

    Each service that is enabled and not already ON is sent a 'start'
    event with its own Deferred; a DeferredList over all of them is
    stored in the module-level ``_StartingDeferred`` and returned, firing
    ``on_started_all_services`` when every service finished starting.

    Raises ServiceNotFound when the boot-up order references an unknown
    service name.
    """
    global _StartingDeferred
    if _StartingDeferred:
        # start() was already called and is still in progress
        lg.warn('driver.start already called')
        return _StartingDeferred
    if _Debug:
        lg.out(_DebugLevel - 6, 'driver.start')
    dl = []
    for name in boot_up_order():
        svc = services().get(name, None)
        if not svc:
            raise ServiceNotFound(name)
        if not svc.enabled():
            continue
        if svc.state == 'ON':
            continue
        d = Deferred()
        dl.append(d)
        svc.automat('start', d)
    if len(dl) == 0:
        # nothing to start - report immediate success
        return succeed(1)
    _StartingDeferred = DeferredList(dl)
    _StartingDeferred.addCallback(on_started_all_services)
    return _StartingDeferred
def datagramReceived(self, datagram, address):
    """
    Handle one incoming UDP datagram.

    The wire format is: 1 byte software version, 1 byte command, then the
    payload.  Datagrams from a different software version are dropped.
    A registered command filter callback may consume the datagram; when it
    does not, the (command, payload) pair is dispatched to the regular
    callbacks.  Total received byte count is accumulated in
    ``self.bytes_in``.
    """
    global _LastDatagramReceivedTime
    _LastDatagramReceivedTime = time.time()
    inp = cStringIO.StringIO(datagram)
    try:
        # version = datagram[0]
        # command = datagram[1]
        # payload = datagram[2:]
        # payloadsz = len(payload)
        datagramsz = len(datagram)
        version = inp.read(1)
        command = inp.read(1)
    except:
        inp.close()
        lg.exc()
        return
    if version != self.SoftwareVersion:
        inp.close()
        lg.warn('- different software version: %s' % version)
        return
    if _Debug:
        # NOTE: self.bytes_in here is the total BEFORE this datagram is counted
        lg.out(_DebugLevel, '<<< [%s] (%d bytes) from %s, total %d bytes received' % (
            command, datagramsz, str(address), self.bytes_in))
        # self.bytes_in += datagramsz
    handled = False
    try:
        # give the filter callback a chance to consume this command;
        # it may read from `inp`, advancing the stream position
        if self.command_filter_callback:
            handled = self.command_filter_callback(command, datagram, inp, address)
    except:
        lg.exc()
    payload = inp.read()
    inp.close()
    if not handled:
        self.run_callbacks((command, payload), address)
    self.bytes_in += datagramsz
def on_received_ack_packet(self, payload):
    """
    Process an incoming ACK packet for one of the active streams.

    The first 4 bytes of ``payload`` hold the stream id (little 'i'
    struct); the remainder is passed to the stream's own ACK handler.
    ACKs for already-finished ("dead") streams are silently ignored;
    ACKs for completely unknown streams are only logged.
    """
    inp = cStringIO.StringIO(payload)
    try:
        stream_id = struct.unpack('i', inp.read(4))[0]
    except:
        inp.close()
        lg.exc()
        # self.session.automat('shutdown')
        return
    if stream_id not in self.streams.keys():
        inp.close()
        # if not self.receivedFiles.has_key(stream_id):
        # lg.warn('unknown stream_id=%d in ACK packet from %s' % (
        # stream_id, self.session.peer_address))
        # self.session.automat('shutdown')
        if stream_id in self.dead_streams:
            # late ACK for a stream that already finished - harmless
            # print 'old ack', stream_id
            pass
        else:
            if _Debug:
                lg.warn('%s - what a stream ???' % stream_id)
            # self.session.automat('shutdown')
        return
    try:
        # hand the rest of the buffer to the stream's ACK logic
        self.streams[stream_id].on_ack_received(inp)
    except:
        lg.exc()
        self.session.automat('shutdown')
    inp.close()
def cancel_task(cmd, first_parameter):
    """
    Cancel a pending or already-running RAID task.

    Searches for a task whose command equals ``cmd`` and whose first
    parameter equals ``first_parameter``: a pending task is removed from
    the queue, a running task gets its worker process aborted.

    :return: True when a matching task was found, False otherwise
    """
    if not A():
        lg.out(10, 'raid_worker.cancel_task SKIP _RaidWorker is not started')
        return False
    task_id = None
    found = False
    for t_id, t_cmd, t_params in A().tasks:
        if cmd == t_cmd and first_parameter == t_params[0]:
            try:
                # tasks holds (id, cmd, params) tuples - remove the whole tuple.
                # The old code passed 3 separate arguments to list.remove(),
                # which raises TypeError and always fell into the except branch.
                A().tasks.remove((t_id, t_cmd, t_params))
                lg.out(10, 'raid_worker.cancel_task found pending task %d, canceling %s' % (t_id, first_parameter))
            except:
                lg.warn('failed removing pending task %d, %s' % (t_id, first_parameter))
            found = True
            break
    for task_id, task_data in A().activetasks.items():
        t_proc, t_cmd, t_params = task_data
        if cmd == t_cmd and first_parameter == t_params[0]:
            lg.out(10, 'raid_worker.cancel_task found started task %d, aborting process %d' % (task_id, t_proc.tid))
            A().processor.cancel(t_proc.tid)
            found = True
            break
    if not found:
        lg.warn('task not found: %s %s' % (cmd, first_parameter))
        return False
    return True
def shutdown():
    """
    Final application shutdown: stop the shutdowner state machine, clear
    all automats, detach config callbacks, report leftover threads and
    close the log files.

    :return: 0 always (process exit code)
    """
    from logs import lg
    from main import config
    from system import bpio
    lg.out(2, 'bpmain.shutdown')
    import shutdowner
    shutdowner.A('reactor-stopped')
    from automats import automat
    automat.objects().clear()
    if len(automat.index()) > 0:
        # some state machines were not destroyed cleanly - list them
        lg.warn('%d automats was not cleaned' % len(automat.index()))
        for a in automat.index().keys():
            lg.out(2, ' %r' % a)
    else:
        lg.out(2, 'bpmain.shutdown automat.objects().clear() SUCCESS, no state machines left in memory')
    config.conf().removeCallback('logs/debug-level')
    lg.out(2, 'bpmain.shutdown currently %d threads running:' % len(threading.enumerate()))
    for t in threading.enumerate():
        lg.out(2, ' ' + str(t))
    lg.out(2, 'bpmain.shutdown finishing and closing log file, EXIT')
    automat.CloseLogFile()
    lg.close_log_file()
    # on frozen Windows builds stdout was redirected at startup - undo it
    if bpio.Windows() and bpio.isFrozen():
        lg.stdout_stop_redirecting()
    return 0
def doCheckOverride(self, arg):
    """
    Action method.

    Validate an incoming identity packet and, when my own contacts are
    NOT present in it, override the cached identity of its creator so
    traffic for that node is routed through me.  When my contacts are
    already inside, the override is skipped.
    """
    target = arg.CreatorID
    idsrc = arg.Payload
    try:
        new_ident = identity.identity(xmlsrc=idsrc)
    except:
        lg.out(_DebugLevel, 'payload: [%s]' % idsrc)
        lg.exc()
        return
    if not new_ident.isCorrect() or not new_ident.Valid():
        lg.warn('incoming identity is not valid')
        return
    if not self._is_my_contacts_present_in_identity(new_ident):
        # read the currently overridden identity (if any) for logging
        cur_contacts = []
        try:
            cur_contacts = identity.identity(
                xmlsrc=identitycache.ReadOverriddenIdentityXMLSource(target)
            ).getContacts()
        except:
            lg.exc()
            return
        if _Debug:
            lg.out(_DebugLevel, 'proxy_router.doCheckOverride override identity for %s' % arg.CreatorID)
            lg.out(_DebugLevel, ' current override contacts is : %s' % cur_contacts)
            lg.out(_DebugLevel, ' new contacts is : %s' % new_ident.getContacts())
        identitycache.OverrideIdentity(arg.CreatorID, idsrc)
    else:
        if _Debug:
            lg.out(_DebugLevel, 'proxy_router.doCheckOverride skip override, found my contacts in identity from %s' % arg.CreatorID)
            lg.out(_DebugLevel, ' known contacts is : %s' % new_ident.getContacts())
def shutdown():
    """
    Stop every periodic testing loop and kill the running bptester process.
    """
    global _Loop
    global _LoopValidate
    global _LoopUpdateCustomers
    global _LoopSpaceTime
    global _CurrentProcess
    lg.out(4, 'localtester.shutdown ')
    # cancel any scheduled loop that is still pending
    for delayed_call in (_Loop, _LoopValidate, _LoopUpdateCustomers, _LoopSpaceTime):
        if delayed_call and delayed_call.active():
            delayed_call.cancel()
    if alive():
        lg.out(4, 'localtester.shutdown is killing bptester')
        try:
            _CurrentProcess.kill()
        except:
            lg.warn('can not kill bptester')
    del _CurrentProcess
    _CurrentProcess = None
def on_tray_icon_command(cmd):
    """
    Dispatch a command selected from the system tray icon menu.
    """
    from main import shutdowner
    from services import driver
    from p2p import network_connector
    lg.out(2, 'on_tray_icon_command %s' % cmd)
    if cmd == 'exit':
        # SendCommandToGUI('exit')
        shutdowner.A('stop', 'exit')
        return
    if cmd == 'restart':
        # SendCommandToGUI('exit')
        appList = bpio.find_process(['bpgui.', ])
        if len(appList) > 0:
            shutdowner.A('stop', 'restartnshow')  # ('restart', 'show'))
        else:
            shutdowner.A('stop', 'restart')  # ('restart', ''))
        return
    if cmd == 'reconnect':
        if driver.is_started('service_network'):
            network_connector.A('reconnect')
        return
    if cmd == 'show':
        show()
        return
    if cmd == 'hide':
        # SendCommandToGUI('exit')
        return
    if cmd == 'toolbar':
        # SendCommandToGUI('toolbar')
        return
    lg.warn('wrong command: ' + str(cmd))
def cancel(self, request, info):
    """
    Handle a CancelService request from a customer: release his quota
    back into the free space pool, drop him from the customers list and
    re-run the local storage tests.

    Replies with Ack on success, Fail when the sender is not a customer
    or the quotas file is broken.
    """
    from p2p import p2p_service
    if not contactsdb.is_customer(request.OwnerID):
        lg.warn(
            "got packet from %s, but he is not a customer" % request.OwnerID)
        return p2p_service.SendFail(request, 'not a customer')
    if accounting.check_create_customers_quotas():
        # quotas file did not exist yet and was just created
        lg.out(6, 'service_supplier.cancel created a new space file')
    space_dict = accounting.read_customers_quotas()
    if request.OwnerID not in space_dict.keys():
        lg.warn(
            "got packet from %s, but not found him in space dictionary" % request.OwnerID)
        return p2p_service.SendFail(request, 'not a customer')
    try:
        # return the customer's allocated bytes back into the free pool
        free_bytes = int(space_dict['free'])
        space_dict['free'] = free_bytes + int(space_dict[request.OwnerID])
    except:
        lg.exc()
        return p2p_service.SendFail(request, 'broken space file')
    new_customers = list(contactsdb.customers())
    new_customers.remove(request.OwnerID)
    contactsdb.update_customers(new_customers)
    contactsdb.save_customers()
    space_dict.pop(request.OwnerID)
    accounting.write_customers_quotas(space_dict)
    from supplier import local_tester
    # re-validate local storage asynchronously after the customer left
    reactor.callLater(0, local_tester.TestUpdateCustomers)
    return p2p_service.SendAck(request, 'accepted')
def data_received(self, payload):
    """
    Handle one incoming data chunk over the TCP stream.

    Wire format: 4 bytes file id + 4 bytes file size (struct 'i'),
    followed by the chunk bytes.  A new inbox file is created on first
    sight of a file id, unless too many are already open (anti-abuse
    guard, which drops the whole connection).  When the file completes,
    an OK reply carrying the file id is sent back.
    """
    from transport.tcp import tcp_connection
    inp = cStringIO.StringIO(payload)
    try:
        file_id = struct.unpack('i', inp.read(4))[0]
        file_size = struct.unpack('i', inp.read(4))[0]
    except:
        inp.close()
        lg.exc()
        return
    inp_data = inp.read()
    inp.close()
    if file_id not in self.inboxFiles:
        if len(self.inboxFiles) >= 2 * MAX_SIMULTANEOUS_OUTGOING_FILES:
            # too many incoming files, seems remote guy is cheating - drop
            # that session!
            lg.warn('too many incoming files, close connection %s' % str(self.connection))
            self.connection.automat('disconnect')
            return
        self.create_inbox_file(file_id, file_size)
    self.inboxFiles[file_id].input_data(inp_data)
    if self.inboxFiles[file_id].is_done():
        # acknowledge completion back to the sender
        self.send_data(tcp_connection.CMD_OK, struct.pack('i', file_id))
        self.inbox_file_done(file_id, 'finished')
def doRemoveSuppliers(self, arg):
    """
    Action method.

    Clears every supplier listed in ``self.dismiss_list`` from the current
    suppliers table (its slot is emptied, not removed, to keep positions
    stable), persists the result and refreshes the web GUI.
    """
    current_suppliers = contactsdb.suppliers()
    desired_suppliers = settings.getSuppliersNumberDesired()
    if len(current_suppliers) < desired_suppliers:
        lg.warn('must have more suppliers %d<%d' % (
            len(current_suppliers), desired_suppliers))
    for supplier_idurl in self.dismiss_list:
        if supplier_idurl not in current_suppliers:
            lg.warn('%s not a supplier' % supplier_idurl)
            continue
        pos = current_suppliers.index(supplier_idurl)
        # current_suppliers.remove(supplier_idurl)
        # keep the slot but blank it out, so supplier positions are preserved
        current_suppliers[pos] = ''
        misc.writeSupplierData(
            supplier_idurl, 'disconnected', time.strftime('%d-%m-%Y %H:%M:%S'))
    # trim to the desired count before saving
    current_suppliers = current_suppliers[:desired_suppliers]
    contactsdb.update_suppliers(current_suppliers)
    contactsdb.save_suppliers()
    if settings.NewWebGUI():
        from web import control
        control.on_suppliers_changed(current_suppliers)
    else:
        from web import webcontrol
        webcontrol.OnListSuppliers()
    lg.out(2, '!!!!!!!!!!! REMOVE SUPPLIERS : %d' % len(self.dismiss_list))
def on_inbox_file_register_failed(self, err, file_id):
    """
    Called when registering an incoming file failed: log the failure and
    drop the whole connection.
    """
    if _Debug:
        details = 'failed to register, file_id=%s err:\n%s' % (str(file_id), str(err))
        lg.warn(details)
        lg.out(_DebugLevel - 8, ' close session %s' % self.session)
    self.connection.automat('disconnect')
def loop(first_start=False):
    """
    Schedule the next automatic software update run.

    Reads the update schedule, computes the next run time (immediately on
    ``first_start``), and arms a reactor timer that fires
    ``run_sheduled_update``.  Does nothing when updates are turned off.
    """
    global _ShedulerTask
    lg.out(4, 'os_windows_update.loop mode=' + str(settings.getUpdatesMode()))
    # NOTE(review): getUpdatesModeValues()[2] presumably is the "turned off" mode
    if settings.getUpdatesMode() == settings.getUpdatesModeValues()[2]:
        lg.out(4, 'os_windows_update.loop is finishing. updates is turned off')
        return
    shed = schedule.Schedule(from_dict=read_shedule_dict())
    nexttime = shed.next_time()
    # nexttime = next(d)
    if first_start:
        # run right away on the very first call
        nexttime = time.time()
    if nexttime is None:
        lg.out(1, 'os_windows_update.loop ERROR calculating shedule interval')
        return
    if nexttime < 0:
        lg.out(1, 'os_windows_update.loop nexttime=%s' % str(nexttime))
        return
    # DEBUG
    # nexttime = time.time() + 60.0
    delay = nexttime - time.time()
    if delay < 0:
        # scheduled moment already passed - fire immediately
        lg.warn('delay=%s %s' % (str(delay), shed))
        delay = 0
    lg.out(6, 'os_windows_update.loop run_sheduled_update will start after %s seconds (%s hours)' % (str(delay), str(delay / 3600.0)))
    _ShedulerTask = reactor.callLater(delay, run_sheduled_update)
def start(self):
    """
    Start the 'proxy' network transport.

    Requires at least one underlying transport to be available.  Creates
    the transport state machine, kicks off its start sequence on the
    reactor and subscribes to the proxy-transport config options.

    :return: a Deferred that fires when the transport finished starting,
        or False when no transports are available
    """
    from twisted.internet import reactor
    from twisted.internet.defer import Deferred
    from logs import lg
    from transport.proxy import proxy_interface
    from transport import network_transport
    from transport import gateway
    from main.config import conf
    if len(self._available_transports()) == 0:
        lg.warn('no transports available')
        return False
    self._check_update_original_identity()
    self.starting_deferred = Deferred()
    self.interface = proxy_interface.GateInterface()
    self.transport = network_transport.NetworkTransport(
        'proxy', self.interface)
    self.transport.automat(
        'init', (gateway.listener(), self._on_transport_state_changed))
    # defer the actual start until the reactor is running
    reactor.callLater(0, self.transport.automat, 'start')
    # react to live config changes for this transport
    conf().addCallback('services/proxy-transport/enabled', self._on_enabled_disabled)
    conf().addCallback('services/proxy-transport/sending-enabled', self._on_sending_enabled_disabled)
    conf().addCallback('services/proxy-transport/receiving-enabled', self._on_receiving_enabled_disabled)
    return self.starting_deferred
def Message(request):
    """
    Message came in for us so we:

    1) check that it is a correspondent
    2) decrypt message body
    3) save on local HDD
    4) call the GUI
    5) send an "Ack" back to sender.
    """
    global _IncomingMessageCallbacks
    lg.out(6, "message.Message from " + str(request.OwnerID))
    # senderidentity = contactsdb.get_correspondent_identity(request.OwnerID)
    # if not senderidentity:
    # lg.warn("had sender not in correspondents list " + request.OwnerID)
    # # return
    # contactsdb.add_correspondent(request.OwnerID, nameurl.GetName(request.OwnerID))
    # contactsdb.save_correspondents()
    new_message = misc.StringToObject(request.Payload)
    if new_message is None:
        lg.warn("wrong Payload, can not extract message from request")
        return
    # duplicate protection: skip a packet id we already processed
    for old_id, old_message in inbox_history():
        if old_id == request.PacketID:
            lg.out(6, "message.Message SKIP, message %s found in history" % old_message)
            return
    inbox_history().append((request.PacketID, new_message))
    clear_message = new_message.ClearBody()
    # SaveMessage(clearmessage)
    from p2p import p2p_service
    p2p_service.SendAck(request)
    # notify all registered consumers about the new message
    for cb in _IncomingMessageCallbacks:
        cb(request, clear_message)
def shutdown():
    """
    Close the local storage database and drop the module-level reference.
    Warns when the storage was never initialized.
    """
    global _LocalStorage
    if _LocalStorage is not None:
        _LocalStorage.close()
        _LocalStorage = None
        return
    lg.warn('local storage is not initialized')
def VerifyExistingRouter():
    """
    Sanity check of the saved proxy settings: the current-router record and
    my saved original identity must be either both present or both absent.

    :return: True when consistent, False otherwise
    """
    router_is_set = bool(ReadCurrentRouter())
    original_saved = bool(ReadMyOriginalIdentitySource())
    if router_is_set and not original_saved:
        lg.warn('current router is set, but my original identity is empty')
        return False
    if original_saved and not router_is_set:
        lg.warn('current router is not set, but some wrong data found as original identity')
        return False
    return True
def doRestoreMyIdentity(self, arg):
    """
    Action method.

    Rebuild my local identity from scratch and clear the saved
    "original identity" setting of the proxy transport; warn when the
    rebuild produced no changes.
    """
    if not my_id.rebuildLocalIdentity():
        lg.warn('my identity was not modified')
    config.conf().setData('services/proxy-transport/my-original-identity', '')
def interface_receiving_started(host, new_options=None):
    """
    Notify the parent process over the XML-RPC proxy that the UDP
    transport started receiving on ``host``.

    :param host: address the transport is now listening on
    :param new_options: optional dict of transport options; the default
        was changed from a mutable ``{}`` literal to ``None`` (classic
        shared-mutable-default pitfall) - behavior is unchanged
    :return: Deferred from the remote call, or a failed Deferred when the
        proxy is not connected yet
    """
    if new_options is None:
        new_options = {}
    if proxy():
        return proxy().callRemote('receiving_started', 'udp', host, new_options)
    lg.warn('transport_udp is not ready')
    return fail('transport_udp is not ready')
def interface_receiving_failed(error_code=None):
    """
    Report a receiving failure of the UDP transport to the parent process.

    :return: Deferred from the remote call, or a failed Deferred when the
        proxy is not connected yet
    """
    if not proxy():
        lg.warn('transport_udp is not ready')
        return fail('transport_udp is not ready')
    return proxy().callRemote('receiving_failed', 'udp', error_code)
def interface_disconnected(result=None):
    """
    Tell the parent process that the UDP transport was disconnected.

    :return: Deferred from the remote call, or a failed Deferred when the
        proxy is not connected yet
    """
    if not proxy():
        lg.warn('transport_udp is not ready')
        return fail('transport_udp is not ready')
    return proxy().callRemote('disconnected', 'udp', result)
def doForwardOutboxPacket(self, arg): """ Action method. """ # decrypt with my key and send to outside world newpacket, info = arg block = encrypted.Unserialize(newpacket.Payload) if block is None: lg.out( 2, 'proxy_router.doForwardOutboxPacket ERROR reading data from %s' % newpacket.RemoteID) return try: session_key = key.DecryptLocalPrivateKey(block.EncryptedSessionKey) padded_data = key.DecryptWithSessionKey(session_key, block.EncryptedData) inpt = BytesIO(padded_data[:int(block.Length)]) payload = serialization.BytesToDict(inpt.read()) inpt.close() sender_idurl = payload['f'] # from receiver_idurl = payload['t'] # to wide = payload['w'] # wide routed_data = payload['p'] # payload except: lg.out( 2, 'proxy_router.doForwardOutboxPacket ERROR reading data from %s' % newpacket.RemoteID) lg.exc() try: inpt.close() except: pass return route = self.routes.get(sender_idurl, None) if not route: inpt.close() lg.warn('route with %s not found' % (sender_idurl)) p2p_service.SendFail(newpacket, 'route not exist', remote_idurl=sender_idurl) return routed_packet = signed.Unserialize(routed_data) if not routed_packet or not routed_packet.Valid(): lg.out( 2, 'proxy_router.doForwardOutboxPacket ERROR unserialize packet from %s' % newpacket.RemoteID) return # send the packet directly to target user_id # we pass not callbacks because all response packets from this call will be also re-routed pout = packet_out.create( routed_packet, wide=wide, callbacks={}, target=receiver_idurl, ) if _Debug: lg.out( _DebugLevel, '>>>Relay-IN-OUT %d bytes from %s at %s://%s :' % ( len(routed_data), nameurl.GetName(sender_idurl), info.proto, info.host, )) lg.out( _DebugLevel, ' routed to %s : %s' % (nameurl.GetName(receiver_idurl), pout)) del block del routed_data del padded_data del route del inpt del session_key del routed_packet
def doForwardInboxPacket(self, arg): """ Action method. """ # encrypt with proxy_receiver()'s key and sent to man behind my proxy receiver_idurl, newpacket, info = arg route_info = self.routes.get(receiver_idurl, None) if not route_info: lg.warn('route with %s not found for inbox packet: %s' % (receiver_idurl, newpacket)) return hosts = route_info['address'] if len(hosts) == 0: lg.warn( 'route with %s do not have actual info about the host, use identity contacts instead' % receiver_idurl) hosts = route_info['contacts'] if len(hosts) == 0: lg.warn('has no known contacts for route with %s' % receiver_idurl) return receiver_proto, receiver_host = hosts[0] publickey = route_info['publickey'] block = encrypted.Block( CreatorID=my_id.getLocalID(), BackupID='routed incoming data', BlockNumber=0, SessionKey=key.NewSessionKey(), SessionKeyType=key.SessionKeyType(), LastBlock=True, Data=newpacket.Serialize(), EncryptKey=lambda inp: key.EncryptOpenSSHPublicKey(publickey, inp), ) raw_data = block.Serialize() routed_packet = signed.Packet( commands.Relay(), newpacket.OwnerID, my_id.getLocalID(), newpacket.PacketID, raw_data, receiver_idurl, ) pout = packet_out.create( newpacket, wide=False, callbacks={}, route={ 'packet': routed_packet, 'proto': receiver_proto, 'host': receiver_host, 'remoteid': receiver_idurl, 'description': ('Relay_%s[%s]_%s' % (newpacket.Command, newpacket.PacketID, nameurl.GetName(receiver_idurl))), }, ) if _Debug: lg.out( _DebugLevel, '<<<Relay-IN-OUT %s %s:%s' % ( str(newpacket), info.proto, info.host, )) lg.out( _DebugLevel, ' sent to %s://%s with %d bytes in %s' % (receiver_proto, receiver_host, len(raw_data), pout)) del raw_data del block del newpacket del routed_packet
def _on_inbox_packet_received(self, newpacket, info, status, error_message):
    """
    Central inbox filter of the proxy router.

    Decides for every incoming packet whether it must be routed (fires a
    routing event into the state machine and returns True to consume the
    packet) or handed over to normal processing (returns False).
    """
    if _Debug:
        lg.out(_DebugLevel, 'proxy_router._on_inbox_packet_received %s' % newpacket)
        lg.out(
            _DebugLevel, ' creator=%s owner=%s' % (
                newpacket.CreatorID, newpacket.OwnerID, ))
        lg.out(
            _DebugLevel, ' sender=%s remote_id=%s' % (
                info.sender_idurl, newpacket.RemoteID, ))
        lg.out(_DebugLevel, ' routes=%s' % list(self.routes.keys()))
    # first filter all traffic addressed to me
    if newpacket.RemoteID == my_id.getLocalID():
        # check command type, filter Routed traffic first
        if newpacket.Command == commands.Relay():
            # look like this is a routed packet addressed to someone else
            if newpacket.CreatorID in list(self.routes.keys()):
                # sent by proxy_sender() from node A : a man behind proxy_router()
                # addressed to some third node B in outside world - need to route
                # A is my consumer and B is a recipient which A wants to contant
                if _Debug:
                    lg.out(
                        _DebugLevel,
                        ' sending "routed-outbox-packet-received" event')
                self.automat('routed-outbox-packet-received', (newpacket, info))
                return True
            # looke like we do not know this guy, so why he is sending us routed traffic?
            lg.warn(
                'unknown %s from %s received, no known routes with %s' %
                (newpacket, newpacket.CreatorID, newpacket.CreatorID))
            self.automat('unknown-packet-received', (newpacket, info))
            return True
        # and this is not a Relay packet, Identity
        elif newpacket.Command == commands.Identity():
            # this is a "propagate" packet from node A addressed to this proxy router
            if newpacket.CreatorID in list(self.routes.keys()):
                # also we need to "reset" overriden identity
                # return False so that other services also can process that Identity()
                if _Debug:
                    lg.out(
                        _DebugLevel,
                        ' sending "known-identity-received" event')
                self.automat('known-identity-received', newpacket)
                return False
            # this node is not yet in routers list,
            # but seems like it tries to contact me
            # return False so that other services also can process that Identity()
            if _Debug:
                lg.out(
                    _DebugLevel,
                    ' sending "unknown-identity-received" event')
            self.automat('unknown-identity-received', newpacket)
            return False
        # it can be a RequestService or CancelService packets...
        # elif newpacket.Command == commands.RequestService():
        # self.automat(event_string, arg)
        # 'request-route-received'....
        # so this packet may be of any kind, but addressed to me
        # for example if I am a supplier for node A he will send me packets in usual way
        # need to skip this packet here and process it as a normal inbox packet
        if _Debug:
            lg.out(
                _DebugLevel,
                ' proxy_router() SKIP packet %s from %s addressed to me' %
                (newpacket, newpacket.CreatorID))
        return False
    # this packet was addressed to someone else
    # it can be different scenarios, if can not found valid scenario - must skip the packet
    receiver_idurl = None
    known_remote_id = newpacket.RemoteID in list(self.routes.keys())
    known_creator_id = newpacket.CreatorID in list(self.routes.keys())
    known_owner_id = newpacket.OwnerID in list(self.routes.keys())
    if known_remote_id:
        # incoming packet from node B addressed to node A behind that proxy, capture it!
        receiver_idurl = newpacket.RemoteID
        if _Debug:
            lg.out(
                _DebugLevel,
                ' proxy_router() ROUTED packet %s from %s to %s' % (
                    newpacket, info.sender_idurl, receiver_idurl))
        self.automat('routed-inbox-packet-received', (receiver_idurl, newpacket, info))
        return True
    # uknown RemoteID...
    # Data() packets may have two cases: a new Data or response with existing Data
    # in that case RemoteID of the Data packet is not pointing to the real recipient
    # need to filter this scenario here and do workaround
    if known_creator_id or known_owner_id:
        # response from node B addressed to node A, after Retrieve() from A who owns this Data()
        # a Data packet sent by node B : a man from outside world
        # addressed to a man behind this proxy_router() - need to route to node A
        # but who is node A? Creator or Owner?
        based_on = ''
        if known_creator_id:
            receiver_idurl = newpacket.CreatorID
            based_on = 'creator'
        else:
            receiver_idurl = newpacket.OwnerID
            based_on = 'owner'
        if _Debug:
            lg.out(
                _DebugLevel,
                ' proxy_router() based on %s ROUTED packet %s from %s to %s' % (
                    based_on, newpacket, info.sender_idurl, receiver_idurl))
        self.automat('routed-inbox-packet-received', (receiver_idurl, newpacket, info))
        return True
    # this packet is not related to any of the routes
    if _Debug:
        lg.out(
            _DebugLevel,
            ' proxy_router() SKIP packet %s from %s : no relations found' %
            (newpacket, newpacket.CreatorID))
    return False
def doRequestPackets(self, arg):
    """
    Action method.

    Queue requests for all Data and Parity pieces of the current block
    that are not already on hand, skipping unknown or offline suppliers
    and pieces already waiting in the request queue.  Fires a
    'request-failed' event when nothing could be requested at all.
    """
    if _Debug:
        lg.out(
            _DebugLevel,
            'restore_worker.doRequestPackets for %s at block %d' % (
                self.BackupID, self.BlockNumber, ))
    from customer import io_throttle
    packetsToRequest = []
    # collect the missing Data segments for this block
    for SupplierNumber in range(self.EccMap.datasegments):
        SupplierID = contactsdb.supplier(SupplierNumber, customer_idurl=self.CustomerIDURL)
        if not SupplierID:
            lg.warn('bad supplier at position %s' % SupplierNumber)
            continue
        if contact_status.isOffline(SupplierID):
            lg.warn('offline supplier: %s' % SupplierID)
            continue
        if self.OnHandData[SupplierNumber]:
            # this piece is already available locally - nothing to request
            if _Debug:
                lg.out(
                    _DebugLevel,
                    ' OnHandData is True for supplier %d' % SupplierNumber)
            continue
        packetsToRequest.append(
            (SupplierID, packetid.MakePacketID(self.BackupID, self.BlockNumber, SupplierNumber, 'Data')))
    # collect the missing Parity segments for this block
    for SupplierNumber in range(self.EccMap.paritysegments):
        SupplierID = contactsdb.supplier(SupplierNumber, customer_idurl=self.CustomerIDURL)
        if not SupplierID:
            lg.warn('bad supplier at position %s' % SupplierNumber)
            continue
        if contact_status.isOffline(SupplierID):
            lg.warn('offline supplier: %s' % SupplierID)
            continue
        if self.OnHandParity[SupplierNumber]:
            if _Debug:
                lg.out(
                    _DebugLevel,
                    ' OnHandParity is True for supplier %d' % SupplierNumber)
            continue
        packetsToRequest.append(
            (SupplierID, packetid.MakePacketID(self.BackupID, self.BlockNumber, SupplierNumber, 'Parity')))
    if _Debug:
        lg.out(_DebugLevel, ' packets to request: %s' % packetsToRequest)
    requests_made = 0
    already_requested = 0
    for SupplierID, packetID in packetsToRequest:
        if io_throttle.HasPacketInRequestQueue(SupplierID, packetID):
            # avoid duplicate requests for the same piece
            already_requested += 1
            if _Debug:
                lg.out(
                    _DebugLevel,
                    ' packet already in request queue: %s %s' % (
                        SupplierID, packetID, ))
            continue
        io_throttle.QueueRequestFile(self._on_packet_request_result, self.CreatorID, packetID, self.CreatorID, SupplierID)
        requests_made += 1
    del packetsToRequest
    if requests_made:
        if _Debug:
            lg.out(
                _DebugLevel,
                " requested %d packets for block %d" %
                (requests_made, self.BlockNumber))
    else:
        if already_requested:
            # everything was already in flight - wait for those results
            if _Debug:
                lg.out(
                    _DebugLevel,
                    " found %d already requested packets for block %d" % (
                        already_requested, self.BlockNumber))
        else:
            # nothing requested and nothing pending: the block can not proceed
            lg.warn('no requests made for block %d' % self.BlockNumber)
            self.automat('request-failed', None)
def _on_router_contact_status_offline(self, oldstate, newstate, event_string, args):
    """
    Log the router's contact-status state machine going offline.
    """
    message = 'router contact status offline: %s->%s after "%s"' % (oldstate, newstate, event_string, )
    lg.warn(message)
def interface_transport_initialized(xmlrpcurl):
    """
    Tell the parent process that the proxy transport finished initializing.

    :return: Deferred from the remote call, or a failed Deferred when the
        proxy is not connected yet; errors are routed into ``proxy_errback``
    """
    if not proxy():
        lg.warn('transport_proxy is not ready')
        return fail(Exception('transport_proxy is not ready')).addErrback(proxy_errback)
    return proxy().callRemote(
        'transport_initialized', 'proxy', xmlrpcurl).addErrback(proxy_errback)
def load_suppliers(path=None, customer_idurl=None, all_customers=False):
    """
    Load suppliers list from disk.

    When ``all_customers`` is True, walks every customer folder under
    ``settings.SuppliersDir()``, handling identity-url rotation of customer
    folders along the way.  Otherwise loads the supplier list for a single
    customer (defaults to my own identity).

    :param path: explicit path to a 'supplierids' file, used only in single-customer mode
    :param customer_idurl: customer to load suppliers for, defaults to local identity
    :param all_customers: load supplier lists for every known customer
    :return: always True
    """
    if all_customers:
        list_local_customers = list(os.listdir(settings.SuppliersDir()))
        if _Debug:
            lg.out(
                _DebugLevel,
                'contactsdb.load_suppliers %d known customers' % len(list_local_customers))
        for customer_id in list_local_customers:
            if not global_id.IsValidGlobalUser(customer_id):
                lg.warn('invalid customer record %s found in %s' % (customer_id, settings.SuppliersDir()))
                continue
            path = os.path.join(settings.SuppliersDir(), customer_id, 'supplierids')
            lst = bpio._read_list(path)
            if lst is None:
                lg.warn('did not found suppliers ids at %s' % path)
                continue
            one_customer_idurl = global_id.GlobalUserToIDURL(customer_id)
            if not id_url.is_cached(one_customer_idurl):
                # can not resolve latest idurl yet - skip until identity is cached
                lg.warn('customer identity %r not cached yet' % one_customer_idurl)
                continue
            if not one_customer_idurl.is_latest():
                # customer idurl was rotated: migrate the on-disk folder name
                latest_customer_path = os.path.join(settings.SuppliersDir(), one_customer_idurl.to_id())
                old_customer_path = os.path.join(settings.SuppliersDir(), customer_id)
                if not os.path.exists(latest_customer_path):
                    os.rename(old_customer_path, latest_customer_path)
                    lg.info(
                        'detected and processed idurl rotate when loading suppliers for customer : %r -> %r' % (customer_id, one_customer_idurl.to_id()))
                else:
                    # latest folder already exists - drop the stale duplicate
                    bpio._dir_remove(old_customer_path)
                    lg.warn('found old customer dir %r and removed' % old_customer_path)
                    continue
            # replace not-yet-cached supplier idurls with empty placeholders
            lst = list(map(lambda i: i if id_url.is_cached(i) else b'', lst))
            set_suppliers(lst, customer_idurl=one_customer_idurl)
            if _Debug:
                lg.out(
                    _DebugLevel,
                    ' loaded %d known suppliers for customer %r' % (len(lst), one_customer_idurl))
        return True
    if not customer_idurl:
        customer_idurl = my_id.getLocalID()
    customer_idurl = id_url.field(customer_idurl)
    if path is None:
        path = os.path.join(settings.SuppliersDir(), global_id.UrlToGlobalID(customer_idurl), 'supplierids')
    lst = bpio._read_list(path)
    if lst is None:
        lst = list()
    # replace not-yet-cached supplier idurls with empty placeholders
    lst = list(map(lambda i: i if id_url.is_cached(i) else b'', lst))
    set_suppliers(lst, customer_idurl=customer_idurl)
    if _Debug:
        lg.out(_DebugLevel, 'contactsdb.load_suppliers %d items from %s' % (len(lst), path))
    return True
def interface_receiving_started(host, new_options=None):
    """
    Notify the parent process that the 'proxy' transport started receiving
    on the given host.

    :param host: address the transport is now listening/receiving on
    :param new_options: optional dict with transport options to forward
    :return: Deferred from the remote call, or an already-failed Deferred
        when the proxy connection is not ready
    """
    # FIX: original signature used a mutable default argument (`new_options={}`)
    # which is shared across all calls; use None as the sentinel instead.
    if new_options is None:
        new_options = {}
    if proxy():
        return proxy().callRemote('receiving_started', 'proxy', host, new_options)
    lg.warn('transport_proxy is not ready')
    return fail(Failure(Exception('transport_proxy is not ready')))
def interface_transport_initialized(xmlrpcurl):
    """
    Report to the parent process that the 'proxy' transport finished
    initialization and is reachable at the given XML-RPC url.
    """
    if not proxy():
        # no live connection to the parent process yet
        lg.warn('transport_proxy is not ready')
        return fail(Failure(Exception('transport_proxy is not ready')))
    return proxy().callRemote('transport_initialized', 'proxy', xmlrpcurl)
def audit_private_key(key_id, untrusted_idurl, timeout=10):
    """
    Be sure remote user posses given private key.

    I need to posses the public key to be able to audit. I will generate a
    random string, encrypt it with given key public key and send encrypted
    string to him. He will decrypt and send me back original string.
    Returns Deferred object.

    :param key_id: full id of the key to be audited
    :param untrusted_idurl: idurl of the remote user being audited
    :param timeout: seconds to wait for the response packet
    :return: Deferred fired from `_on_audit_private_key_response` or errback-ed
        on validation/timeout failures
    """
    if _Debug:
        lg.out(
            _DebugLevel,
            'key_ring.audit_private_key testing %s from %s' % (key_id, untrusted_idurl))
    result = Deferred()
    recipient_id_obj = identitycache.FromCache(untrusted_idurl)
    if not recipient_id_obj:
        lg.warn('not found "%s" in identity cache' % untrusted_idurl)
        result.errback(
            Exception('not found "%s" in identity cache' % untrusted_idurl))
        return result
    key_alias, creator_idurl = my_keys.split_key_id(key_id)
    if not key_alias or not creator_idurl:
        lg.warn('wrong key_id')
        result.errback(Exception('wrong key_id'))
        return result
    # random sample the remote side must be able to decrypt with the private key
    private_test_sample = key.NewSessionKey()
    if untrusted_idurl == creator_idurl and key_alias == 'master':
        # auditing the remote user's own master key: encrypt with his identity key
        lg.warn('doing audit of master key (private part) of remote user')
        private_test_encrypted_sample = recipient_id_obj.encrypt(
            private_test_sample)
    else:
        if not my_keys.is_key_registered(key_id):
            lg.warn('unknown key: "%s"' % key_id)
            result.errback(Exception('unknown key: "%s"' % key_id))
            return result
        private_test_encrypted_sample = my_keys.encrypt(
            key_id, private_test_sample)
    json_payload = {
        'key_id': key_id,
        'audit': {
            'public_sample': '',
            # NOTE(review): b64encode() returns bytes - under Python 3 json.dumps
            # would reject it; presumably this code path runs where the result is
            # str-compatible - TODO confirm
            'private_sample': base64.b64encode(private_test_encrypted_sample),
        }
    }
    raw_payload = json.dumps(json_payload)
    block = encrypted.Block(
        BackupID=key_id,
        Data=raw_payload,
        SessionKey=key.NewSessionKey(),
        # encrypt data using public key of recipient
        EncryptKey=lambda inp: recipient_id_obj.encrypt(inp),
    )
    encrypted_payload = block.Serialize()
    p2p_service.SendAuditKey(
        remote_idurl=recipient_id_obj.getIDURL(),
        encrypted_payload=encrypted_payload,
        packet_id=key_id,
        timeout=timeout,
        callbacks={
            commands.Ack(): lambda response, info: _on_audit_private_key_response(
                response, info, key_id, untrusted_idurl, private_test_sample, result),
            commands.Fail(): lambda response, info: result.errback(Exception(response)),
            None: lambda pkt_out: result.errback(Exception('timeout')),  # timeout
        },
    )
    return result
def Identity(newpacket, send_ack=True):
    """
    Normal node or Identity server is sending us a new copy of an identity
    for a contact of ours.

    Checks that identity is signed correctly. Sending requests to cache
    all sources (other identity servers) holding that identity.

    :param newpacket: incoming signed packet with identity XML in Payload
    :param send_ack: reply with Ack() when identity was accepted
    :return: True when the identity was accepted (and optionally acked)
    """
    # TODO: move to service_gateway
    newxml = newpacket.Payload
    newidentity = identity.identity(xmlsrc=newxml)
    # SECURITY
    # check that identity is signed correctly
    # old public key matches new one
    # this is done in `UpdateAfterChecking()`
    idurl = newidentity.getIDURL()
    if not identitycache.HasKey(idurl):
        lg.info('received new identity %s rev %r' % (idurl, newidentity.getRevisionValue()))
    if not identitycache.UpdateAfterChecking(idurl, newxml):
        lg.warn("ERROR has non-Valid identity")
        return False
    if my_id.isLocalIdentityReady():
        if newidentity.getPublicKey() == my_id.getLocalIdentity().getPublicKey():
            # this identity has MY public key: it is a copy of my own identity
            if newidentity.getRevisionValue() > my_id.getLocalIdentity().getRevisionValue():
                # someone holds a newer revision of my identity than I do -
                # rebuild mine with an even higher revision to win back
                lg.warn(
                    'received my own identity from another user, but with higher revision number')
                reactor.callLater(0, my_id.rebuildLocalIdentity, new_revision=newidentity.getRevisionValue() + 1)  # @UndefinedVariable
                return False
    latest_identity = id_url.get_latest_ident(newidentity.getPublicKey())
    if latest_identity:
        if latest_identity.getRevisionValue() > newidentity.getRevisionValue():
            # check if received identity is the most recent revision number we every saw for that remote user
            # in case we saw same identity with higher revision number need to reply with Fail packet and notify user
            # this may happen after identity restore - the user starts counting revision number from 0
            # but other nodes already store previous copies, user just need to jump to the most recent revision number
            lg.warn(
                'received new identity with out-dated revision number from %r' % idurl)
            ident_packet = signed.Packet(
                Command=commands.Identity(),
                OwnerID=latest_identity.getIDURL(),
                CreatorID=latest_identity.getIDURL(),
                PacketID='identity:%s' % packetid.UniqueID(),
                Payload=latest_identity.serialize(),
                RemoteID=idurl,
            )
            reactor.callLater(0, packet_out.create, outpacket=ident_packet, wide=True, callbacks={}, keep_alive=False)  # @UndefinedVariable
            return False
    # Now that we have ID we can check packet
    if not newpacket.Valid():
        # If not valid do nothing
        lg.warn("not Valid packet from %s" % idurl)
        return False
    if not send_ack:
        if _Debug:
            lg.out(
                _DebugLevel,
                "p2p_service.Identity %s idurl=%s remoteID=%r skip sending Ack()" % (newpacket.PacketID, idurl, newpacket.RemoteID))
        return True
    if newpacket.OwnerID == idurl:
        if _Debug:
            lg.out(
                _DebugLevel,
                "p2p_service.Identity %s idurl=%s remoteID=%r sending wide Ack()" % (newpacket.PacketID, idurl, newpacket.RemoteID))
    else:
        if _Debug:
            lg.out(
                _DebugLevel,
                "p2p_service.Identity %s idurl=%s remoteID=%r but packet ownerID=%s sending wide Ack()" % (
                    newpacket.PacketID, idurl, newpacket.RemoteID, newpacket.OwnerID, ))
    # wide=True : a small trick to respond to all his contacts
    reactor.callLater(0, SendAck, newpacket, wide=True)  # @UndefinedVariable
    return True
def interface_receiving_failed(error_code=None):
    """
    Forward a 'receiving_failed' notification (with optional error code)
    to the parent process.
    """
    if not proxy():
        # no live connection to the parent process yet
        lg.warn('transport_proxy is not ready')
        return fail(Failure(Exception('transport_proxy is not ready')))
    return proxy().callRemote('receiving_failed', 'proxy', error_code)
def doProcessRequest(self, arg):
    """
    Action method.

    Handle an incoming RequestService()/CancelService() packet from a
    potential routed user: validate the embedded identity, open or drop
    a route, override the routed identity when needed, and Ack/Fail back.
    """
    global _MaxRoutesNumber
    json_payload, request, info = arg
    user_id = request.CreatorID
    #--- commands.RequestService()
    if request.Command == commands.RequestService():
        if len(self.routes) >= _MaxRoutesNumber:
            # capacity limit reached - reject new routes
            if _Debug:
                lg.out(
                    _DebugLevel,
                    'proxy_server.doProcessRequest RequestService rejected: too many routes')
                lg.out(_DebugLevel, '    %s' % pprint.pformat(self.routes))
            p2p_service.SendAck(request, 'rejected', wide=True)
        else:
            try:
                # service_info = request.Payload
                # idsrc = service_info.lstrip('service_proxy_server').strip()
                idsrc = json_payload['identity']
                cached_id = identity.identity(xmlsrc=idsrc)
            except:
                lg.out(_DebugLevel, 'payload: [%s]' % request.Payload)
                lg.exc()
                return
            if not cached_id.Valid():
                lg.warn('incoming identity is not valid')
                return
            if not cached_id.isCorrect():
                lg.warn('incoming identity is not correct')
                return
            if user_id != cached_id.getIDURL():
                # identity inside the payload must match the packet creator
                lg.warn(
                    'incoming identity is not belong to request packet creator')
                return
            if contactsdb.is_supplier(user_id):
                # do not route traffic for my own suppliers
                if _Debug:
                    lg.out(
                        _DebugLevel,
                        'proxy_server.doProcessRequest RequestService rejected: this user is my supplier')
                p2p_service.SendAck(request, 'rejected', wide=True)
                return
            oldnew = ''
            if user_id not in list(self.routes.keys()):
                # accept new route
                oldnew = 'NEW'
                self.routes[user_id] = {}
            else:
                # accept existing routed user
                oldnew = 'OLD'
            if not self._is_my_contacts_present_in_identity(cached_id):
                # user's identity does not point to me yet: override his
                # cached identity with the version he sent us
                if _Debug:
                    lg.out(_DebugLevel, '    DO OVERRIDE identity for %s' % user_id)
                identitycache.OverrideIdentity(user_id, idsrc)
            else:
                if _Debug:
                    lg.out(
                        _DebugLevel,
                        ' SKIP OVERRIDE identity for %s' % user_id)
            # record/refresh route bookkeeping and persist it
            self.routes[user_id]['time'] = time.time()
            self.routes[user_id]['identity'] = idsrc
            self.routes[user_id]['publickey'] = cached_id.publickey
            self.routes[user_id]['contacts'] = cached_id.getContactsAsTuples()
            self.routes[user_id]['address'] = []
            self._write_route(user_id)
            active_user_sessions = gateway.find_active_session(
                info.proto, info.host)
            if active_user_sessions:
                user_connection_info = {
                    'id': active_user_sessions[0].id,
                    'index': active_user_sessions[0].index,
                    'proto': info.proto,
                    'host': info.host,
                    'idurl': user_id,
                }
                active_user_session_machine = automat.objects().get(
                    user_connection_info['index'], None)
                if active_user_session_machine:
                    # get notified when this user's session drops out of CONNECTED
                    active_user_session_machine.addStateChangedCallback(
                        lambda o, n, e, a: self._on_user_session_disconnected(user_id, o, n, e, a),
                        oldstate='CONNECTED',
                    )
                    lg.info(
                        'connected %s routed user, set active session: %s' % (oldnew.capitalize(), user_connection_info))
                else:
                    lg.err('not found session state machine: %s' % user_connection_info['index'])
            else:
                lg.warn(
                    'active connection with user at %s:%s was not found' % (
                        info.proto, info.host, ))
                lg.warn('current active sessions: %d' % len(gateway.list_active_sessions(info.proto)))
            self.acks.append(
                p2p_service.SendAck(request, 'accepted', wide=True))
            if _Debug:
                lg.out(
                    _DebugLevel,
                    'proxy_server.doProcessRequest !!!!!!! ACCEPTED %s ROUTE for %s' % (oldnew, user_id))
    #--- commands.CancelService()
    elif request.Command == commands.CancelService():
        if user_id in self.routes:
            # cancel existing route
            self._remove_route(user_id)
            self.routes.pop(user_id)
            identitycache.StopOverridingIdentity(user_id)
            p2p_service.SendAck(request, 'accepted', wide=True)
            if _Debug:
                lg.out(
                    _DebugLevel,
                    'proxy_server.doProcessRequest !!!!!!! CANCELLED ROUTE for %s' % user_id)
        else:
            p2p_service.SendAck(request, 'rejected', wide=True)
            if _Debug:
                lg.out(
                    _DebugLevel,
                    'proxy_server.doProcessRequest CancelService rejected : %s is not found in routes' % user_id)
                lg.out(_DebugLevel, '    %s' % pprint.pformat(self.routes))
    else:
        # unknown command for this state - refuse it
        p2p_service.SendFail(request, 'rejected', wide=True)
def SendListFiles(target_supplier, customer_idurl=None, key_id=None, query_items=None, wide=False, callbacks=None, timeout=None):
    """
    This is used as a request method from your supplier : if you send him
    a ListFiles() packet he will reply you with a list of stored files in
    a Files() packet.

    :param target_supplier: supplier position (digit-like value) or supplier idurl
    :param customer_idurl: customer whose supplier is addressed, defaults to my identity
    :param key_id: key to be used in the request, falls back to my "master" key
    :param query_items: list of query patterns, defaults to ['*']
    :param wide: forwarded to gateway.outbox()
    :param callbacks: response callbacks forwarded to gateway.outbox()
    :param timeout: response timeout forwarded to gateway.outbox()
    :return: outgoing signed.Packet or None when the supplier idurl is unknown
    """
    # FIX: original signature used mutable default arguments
    # (`query_items=[]`, `callbacks={}`) shared across calls; use None sentinels.
    if callbacks is None:
        callbacks = {}
    MyID = my_id.getIDURL()
    if not customer_idurl:
        customer_idurl = MyID
    if not str(target_supplier).isdigit():
        RemoteID = target_supplier
    else:
        # numeric value means a supplier position in the customer's family
        RemoteID = contactsdb.supplier(target_supplier, customer_idurl=customer_idurl)
    if not RemoteID:
        lg.warn("RemoteID is empty target_supplier=%s" % str(target_supplier))
        return None
    if not key_id:
        # key_id = global_id.MakeGlobalID(idurl=customer_idurl, key_alias='customer')
        # TODO: due to issue with "customer" key backup/restore decided to always use my "master" key
        # to retrieve my list files info from supplier
        # expect remote user always poses my master public key from my identity.
        # probably require more work to build more reliable solution without using my master key at all
        # when my identity rotated supplier first needs to receive my new identity and then sending ListFiles()
        key_id = my_id.getGlobalID(key_alias='master')
    else:
        key_id = my_keys.latest_key_id(key_id)
    if not my_keys.is_key_registered(key_id) or not my_keys.is_key_private(key_id):
        lg.warn(
            'key %r not exist or public, my "master" key to be used with ListFiles() packet' % key_id)
        key_id = my_id.getGlobalID(key_alias='master')
    PacketID = "%s:%s" % (key_id, packetid.UniqueID(), )
    if not query_items:
        query_items = ['*', ]
    Payload = serialization.DictToBytes({'items': query_items, })
    if _Debug:
        lg.out(
            _DebugLevel,
            "p2p_service.SendListFiles %r to %r of customer %r with query : %r" % (
                PacketID, nameurl.GetName(RemoteID), nameurl.GetName(customer_idurl), query_items, ))
    result = signed.Packet(
        Command=commands.ListFiles(),
        OwnerID=MyID,
        CreatorID=MyID,
        PacketID=PacketID,
        Payload=Payload,
        RemoteID=RemoteID,
    )
    gateway.outbox(result, wide=wide, callbacks=callbacks, response_timeout=timeout)
    return result
def _on_incoming_contacts_packet(self, newpacket, info):
    """
    Handle an incoming 'contacts' packet addressed to the family_member
    machinery: parse JSON payload, validate the scope/type, and route the
    event to the matching family_member() instance.

    :return: True when the packet was dispatched, False otherwise
    """
    from logs import lg
    from lib import serialization
    from lib import strng
    from supplier import family_member
    from userid import my_id
    try:
        json_payload = serialization.BytesToDict(newpacket.Payload, keys_to_text=True)
        contacts_type = strng.to_text(json_payload['type'])
        contacts_space = strng.to_text(json_payload['space'])
    except:
        lg.exc()
        return False
    if contacts_space != 'family_member':
        # not meant for us - let other handlers try
        return False
    if contacts_type == 'suppliers_list':
        try:
            customer_idurl = strng.to_bin(json_payload['customer_idurl'])
            ecc_map = strng.to_text(json_payload['customer_ecc_map'])
            suppliers_list = list(map(strng.to_bin, json_payload['suppliers_list']))
            transaction_revision = json_payload.get('transaction_revision')
        except:
            lg.exc()
            return False
        if customer_idurl == my_id.getLocalIDURL():
            # my own family is managed locally, not via incoming packets
            lg.warn('received contacts for my own customer family')
            return False
        fm = family_member.by_customer_idurl(customer_idurl)
        if not fm:
            lg.warn('family_member() instance not found for incoming %s from %s for customer %r' % (
                newpacket, info, customer_idurl, ))
            return False
        fm.automat('contacts-received', {
            'type': contacts_type,
            'packet': newpacket,
            'customer_idurl': customer_idurl,
            'customer_ecc_map': ecc_map,
            'suppliers_list': suppliers_list,
            'transaction_revision': transaction_revision,
        })
        return True
    elif contacts_type == 'supplier_position':
        try:
            customer_idurl = strng.to_bin(json_payload['customer_idurl'])
            ecc_map = strng.to_text(json_payload['customer_ecc_map'])
            supplier_idurl = strng.to_bin(json_payload['supplier_idurl'])
            supplier_position = json_payload['supplier_position']
            family_snapshot = json_payload.get('family_snapshot')
        except:
            lg.exc()
            return False
        if customer_idurl == my_id.getLocalIDURL():
            # my own family is managed locally, not via incoming packets
            lg.warn('received contacts for my own customer family')
            return False
        fm = family_member.by_customer_idurl(customer_idurl)
        if not fm:
            lg.warn('family_member() instance not found for incoming %s from %s for customer %r' % (
                newpacket, info, customer_idurl, ))
            return False
        fm.automat('contacts-received', {
            'type': contacts_type,
            'packet': newpacket,
            'customer_idurl': customer_idurl,
            'customer_ecc_map': ecc_map,
            'supplier_idurl': supplier_idurl,
            'supplier_position': supplier_position,
            'family_snapshot': family_snapshot,
        })
        return True
    return False
def _on_supplier_pub_key_failed(self, err, supplier_idurl):
    """
    Errback: public key delivery to a supplier failed - log it and stop
    tracking a pending response from that supplier.
    """
    lg.warn(err)
    if supplier_idurl in self.suppliers_responses:
        del self.suppliers_responses[supplier_idurl]
    return None
def _on_coins_failed(self, fails):
    """
    Errback: log failed coin operations, but only when debugging is on.
    """
    if not _Debug:
        return
    lg.warn(str(fails))
def _on_user_priv_key_failed(self, err):
    """
    Errback: private key delivery to the remote node failed - log the
    error and push a 'fail' event into the state machine.
    """
    lg.warn(err)
    reason = 'private key delivery failed to remote node'
    self.automat('fail', Exception(reason))
    return None
def on_failed(response, error):
    """
    Callback: sending of the Files() packet failed - just log it.
    """
    details = (response, error, )
    lg.warn('send files %s failed with %s' % details)
def interface_disconnected(result=None):
    """
    Forward a 'disconnected' notification for the 'proxy' transport to
    the parent process.
    """
    if not proxy():
        # no live connection to the parent process yet
        lg.warn('transport_proxy is not ready')
        return fail(Failure(Exception('transport_proxy is not ready')))
    return proxy().callRemote('disconnected', 'proxy', result)
def send(customer_idurl, packet_id, format_type, key_id, remote_idurl, query_items=None):
    """
    Reply to an incoming ListFiles() request: scan my stored files for the
    given customer, pack and encrypt the list, and send it back in a
    Files() packet.

    :param customer_idurl: customer whose stored files are listed
    :param packet_id: id of the request packet, reused in the response
    :param format_type: packing format passed to PackListFiles()
    :param key_id: key used to encrypt the resulting list
    :param remote_idurl: destination node for the Files() packet
    :param query_items: query path patterns, defaults to ['*']
    :return: outgoing Files() packet, or a Fail packet on errors
    """
    # FIX: original signature used a mutable default argument (`query_items=[]`)
    # shared across calls; use None and normalize below (falsy -> ['*']).
    if not query_items:
        query_items = ['*', ]
    parts = global_id.ParseGlobalID(key_id)
    if parts['key_alias'] == 'master' and parts['idurl'] != my_id.getLocalID():
        # lg.warn('incoming ListFiles() request with customer "master" key: %r' % key_id)
        if not my_keys.is_key_registered(key_id) and identitycache.HasKey(parts['idurl']):
            # first request from that customer: register his public key locally
            lg.info(
                'customer public key %r to be registered locally for the first time' % key_id)
            known_ident = identitycache.FromCache(parts['idurl'])
            if not my_keys.register_key(key_id, known_ident.getPublicKey()):
                lg.err(
                    'failed to register known public key of the customer: %r' % key_id)
    if not my_keys.is_key_registered(key_id):
        lg.warn(
            'not able to return Files() for customer %s, key %s not registered' % (
                customer_idurl, key_id, ))
        return p2p_service.SendFailNoRequest(customer_idurl, packet_id, response='key not registered')
    if _Debug:
        lg.out(
            _DebugLevel,
            "list_files.send to %s, customer_idurl=%s, key_id=%s, query_items=%r" % (
                remote_idurl, customer_idurl, key_id, query_items, ))
    ownerdir = settings.getCustomerFilesDir(customer_idurl)
    plaintext = ''
    if os.path.isdir(ownerdir):
        try:
            for query_path in query_items:
                plaintext += process_query_item(query_path, parts['key_alias'], ownerdir)
        except:
            lg.exc()
            return p2p_service.SendFailNoRequest(
                customer_idurl, packet_id, response='list files query processing error')
    else:
        lg.warn('did not found customer folder: %s' % ownerdir)
    if _Debug:
        lg.out(_DebugLevel, '\n%s' % plaintext)
    raw_list_files = PackListFiles(plaintext, format_type)
    block = encrypted.Block(
        CreatorID=my_id.getLocalID(),
        BackupID=key_id,
        Data=raw_list_files,
        SessionKey=key.NewSessionKey(session_key_type=key.SessionKeyType()),
        SessionKeyType=key.SessionKeyType(),
        EncryptKey=key_id,
    )
    encrypted_list_files = block.Serialize()
    newpacket = p2p_service.SendFiles(
        idurl=remote_idurl,
        raw_list_files_info=encrypted_list_files,
        packet_id=packet_id,
        callbacks={
            commands.Ack(): on_acked,
            commands.Fail(): on_failed,
            None: on_timeout,
        },
    )
    return newpacket
def on_timeout(pkt_out):
    """
    Callback: outgoing Files() packet did not get a response in time.
    """
    message = 'send files with %s was timed out' % pkt_out
    lg.warn(message)
def _do_process_inbox_packet(self, *args, **kwargs):
    """
    Decrypt an incoming relayed packet, unserialize the inner routed
    packet, cache any identities carried inside, and hand the routed
    packet over to ``packet_in.process()``.
    """
    newpacket, info, _, _ = args[0]
    block = encrypted.Unserialize(newpacket.Payload)
    if block is None:
        lg.err('reading data from %s' % newpacket.CreatorID)
        return
    try:
        # decrypt the session key with my private key, then the payload itself
        session_key = key.DecryptLocalPrivateKey(block.EncryptedSessionKey)
        padded_data = key.DecryptWithSessionKey(session_key, block.EncryptedData)
        inpt = BytesIO(padded_data[:int(block.Length)])
        data = inpt.read()
    except:
        lg.err('reading data from %s' % newpacket.CreatorID)
        lg.exc()
        try:
            # inpt may not exist when decryption itself raised
            inpt.close()
        except:
            pass
        return
    inpt.close()
    routed_packet = signed.Unserialize(data)
    if not routed_packet:
        lg.err('unserialize packet failed from %s' % newpacket.CreatorID)
        return
    if _Debug:
        lg.out(
            _DebugLevel,
            '<<<Relay-IN %s from %s://%s with %d bytes' % (str(routed_packet), info.proto, info.host, len(data)))
    if routed_packet.Command == commands.Identity():
        # identity travels in the open inside the relay - cache it right away
        if _Debug:
            lg.out(_DebugLevel, ' found identity in relay packet %s' % routed_packet)
        newidentity = identity.identity(xmlsrc=routed_packet.Payload)
        idurl = newidentity.getIDURL()
        if not identitycache.HasKey(idurl):
            lg.info('received new identity: %s' % idurl)
        if not identitycache.UpdateAfterChecking(idurl, routed_packet.Payload):
            lg.warn("ERROR has non-Valid identity")
            return
    if routed_packet.Command == commands.Relay() and routed_packet.PacketID.lower() == 'identity':
        # identity wrapped one level deeper inside another Relay packet
        if _Debug:
            lg.out(
                _DebugLevel,
                ' found routed identity in relay packet %s' % routed_packet)
        try:
            routed_identity = signed.Unserialize(routed_packet.Payload)
            newidentity = identity.identity(xmlsrc=routed_identity.Payload)
            idurl = newidentity.getIDURL()
            if not identitycache.HasKey(idurl):
                lg.warn('received new "routed" identity: %s' % idurl)
            if not identitycache.UpdateAfterChecking(idurl, routed_identity.Payload):
                lg.warn("ERROR has non-Valid identity")
                return
        except:
            lg.exc()
    # if not routed_packet.Valid():
    #     lg.err('invalid packet %s from %s' % (
    #         routed_packet, newpacket.CreatorID, ))
    #     return
    self.traffic_in += len(data)
    packet_in.process(routed_packet, info)
    # release references to potentially large buffers right away
    del block
    del data
    del padded_data
    del inpt
    del session_key
    del routed_packet
def on_key_received(newpacket, info, status, error_message):
    """
    Handle an incoming Key() packet: decrypt the carried key info and
    register the key locally, carefully refusing to overwrite any key
    material we already hold.

    :return: True when the key was accepted (or an identical copy already
        exists), False on any error (a Fail() packet is sent back)
    """
    block = encrypted.Unserialize(newpacket.Payload)
    if block is None:
        lg.out(
            2,
            'key_ring.on_key_received ERROR reading data from %s' % newpacket.RemoteID)
        return False
    try:
        key_data = block.Data()
        key_json = json.loads(key_data)
        key_id = key_json['key_id']
        key_id, key_object = my_keys.read_key_info(key_json)
        if key_object.isPublic():
            # received key is a public key
            if my_keys.is_key_registered(key_id):
                # but we already have a key with that ID
                if my_keys.is_key_private(key_id):
                    # we should not overwrite existing private key
                    raise Exception('private key already registered')
                if my_keys.get_public_key_raw(key_id) != key_object.toString():
                    # and we should not overwrite existing public key as well
                    raise Exception(
                        'another key already registered with that ID')
                p2p_service.SendAck(newpacket)
                lg.warn('received existing public key: %s, skip' % key_id)
                return True
            if not my_keys.register_key(key_id, key_object):
                raise Exception('key register failed')
            else:
                lg.info('added new key %s, is_public=%s' % (key_id, key_object.isPublic()))
            p2p_service.SendAck(newpacket)
            if _Debug:
                lg.info(
                    'received and stored locally a new key %s, include_private=%s' % (key_id, key_json.get('include_private')))
            return True
        # received key is a private key
        if my_keys.is_key_registered(key_id):
            # check if we already have that key
            if my_keys.is_key_private(key_id):
                # we have already private key with same ID!!!
                if my_keys.get_private_key_raw(key_id) != key_object.toString():
                    # and this is a new private key : we should not overwrite!
                    raise Exception('private key already registered')
                # this is the same private key
                p2p_service.SendAck(newpacket)
                lg.warn('received existing private key: %s, skip' % key_id)
                return True
            # but we have a public key with same ID
            if my_keys.get_public_key_raw(key_id) != key_object.toPublicString():
                # and we should not overwrite existing public key as well
                raise Exception('another key already registered with that ID')
            # the incoming private key matches our public half: upgrade it
            lg.info('erasing public key %s' % key_id)
            my_keys.erase_key(key_id)
            if not my_keys.register_key(key_id, key_object):
                raise Exception('key register failed')
            lg.info('added new key %s, is_public=%s' % (key_id, key_object.isPublic()))
            p2p_service.SendAck(newpacket)
            return True
        # no private key with given ID was registered
        if not my_keys.register_key(key_id, key_object):
            raise Exception('key register failed')
        lg.info('added new key %s, is_public=%s' % (key_id, key_object.isPublic()))
        p2p_service.SendAck(newpacket)
        return True
    except Exception as exc:
        lg.exc()
        p2p_service.SendFail(newpacket, str(exc))
    return False
def _on_user_session_disconnected(self, user_id, oldstate, newstate, event_string, *args, **kwargs):
    """
    State-change callback: a routed user's transport session left the
    CONNECTED state - notify the state machine about it.
    """
    transition = '%s->%s' % (oldstate, newstate)
    lg.warn('user session disconnected: ' + transition)
    self.automat('routed-session-disconnected', user_id)
def request(self, json_payload, newpacket, info):
    """
    Handle a remote service request for p2p queue management: execute each
    requested queue/consumer action, collect per-item results, and reply
    with an Ack() carrying the response list (or Fail() on bad payload).
    """
    from logs import lg
    from lib import serialization
    from p2p import p2p_service
    from stream import p2p_queue
    try:
        service_requests_list = json_payload['items']
    except:
        lg.warn("invalid json payload")
        return p2p_service.SendFail(newpacket, 'invalid json payload')
    service_responses_list = []
    for r_json in service_requests_list:
        # response item starts as a copy of the request and gains 'result'/'reason'
        resp = r_json.copy()
        r_scope = r_json.get('scope', '')
        r_action = r_json.get('action', '')
        try:
            if r_scope == 'queue':
                if r_action == 'open':
                    resp['result'] = 'denied' if not p2p_queue.open_queue(
                        queue_id=r_json.get('queue_id'), ) else 'OK'
                elif r_action == 'close':
                    resp['result'] = 'denied' if not p2p_queue.close_queue(
                        queue_id=r_json.get('queue_id'), ) else 'OK'
            elif r_scope == 'consumer':
                if r_action == 'start':
                    resp['result'] = 'denied' if not p2p_queue.add_consumer(
                        consumer_id=r_json.get('consumer_id'), ) else 'OK'
                elif r_action == 'stop':
                    resp['result'] = 'denied' if not p2p_queue.remove_consumer(
                        consumer_id=r_json.get('consumer_id'), ) else 'OK'
                elif r_action == 'add_callback':
                    resp['result'] = 'denied' if not p2p_queue.add_callback_method(
                        consumer_id=r_json.get('consumer_id'),
                        callback_method=r_json.get('method'), ) else 'OK'
                elif r_action == 'remove_callback':
                    resp['result'] = 'denied' if not p2p_queue.remove_callback_method(
                        consumer_id=r_json.get('consumer_id'),
                        callback_method=r_json.get('method'), ) else 'OK'
                elif r_action == 'subscribe':
                    resp['result'] = 'denied' if not p2p_queue.subscribe_consumer(
                        consumer_id=r_json.get('consumer_id'),
                        queue_id=r_json.get('queue_id'), ) else 'OK'
                elif r_action == 'unsubscribe':
                    resp['result'] = 'denied' if not p2p_queue.unsubscribe_consumer(
                        consumer_id=r_json.get('consumer_id'),
                        queue_id=r_json.get('queue_id'), ) else 'OK'
            elif r_scope == 'producer':
                # producing on behalf of a remote node is always refused
                resp['result'] = 'denied'
                resp['reason'] = 'remote requests for producing messages is not allowed'
                if False:
                    # TODO: do we need that ?
                    if r_action == 'start':
                        resp['result'] = 'denied' if not p2p_queue.add_producer(
                            producer_id=r_json.get('producer_id'), ) else 'OK'
                    elif r_action == 'stop':
                        resp['result'] = 'denied' if not p2p_queue.remove_producer(
                            producer_id=r_json.get('producer_id'), ) else 'OK'
                    elif r_action == 'connect':
                        resp['result'] = 'denied' if not p2p_queue.connect_producer(
                            producer_id=r_json.get('producer_id'),
                            queue_id=r_json.get('queue_id'), ) else 'OK'
                    elif r_action == 'disconnect':
                        resp['result'] = 'denied' if not p2p_queue.disconnect_producer(
                            producer_id=r_json.get('producer_id'),
                            queue_id=r_json.get('queue_id'), ) else 'OK'
        except Exception as exc:
            # any failure in the queue API is reported per-item, not raised
            resp['result'] = 'denied'
            resp['reason'] = str(exc)
        service_responses_list.append(resp)
        lg.out(
            self.debug_level,
            'service_p2p_notifications.request %s:%s is [%s] : %s' % (
                r_scope, r_action, resp['result'], resp.get('reason', 'OK'), ))
    payload = serialization.DictToBytes({'items': service_responses_list, }, values_to_text=True)
    return p2p_service.SendAck(newpacket, payload)
def transfer_key(key_id, trusted_idurl, include_private=False, timeout=10, result=None):
    """
    Send a copy of one of my keys to a trusted remote node.

    The key info is serialized, encrypted with the recipient's public
    identity key and delivered in a Key() packet.

    :param key_id: id of the key to transfer
    :param trusted_idurl: idurl of the recipient node
    :param include_private: also include the private part of the key
    :param timeout: seconds to wait for the response packet
    :param result: optional pre-existing Deferred to fire, a new one is
        created when not provided
    :return: Deferred fired from `_on_transfer_key_response` or errback-ed
        on validation failures
    """
    if _Debug:
        lg.out(_DebugLevel, 'key_ring.transfer_key %s -> %s' % (key_id, trusted_idurl))
    if not result:
        result = Deferred()
    recipient_id_obj = identitycache.FromCache(trusted_idurl)
    if not recipient_id_obj:
        lg.warn('not found "%s" in identity cache' % trusted_idurl)
        result.errback(
            Exception('not found "%s" in identity cache' % trusted_idurl))
        return result
    key_alias, creator_idurl = my_keys.split_key_id(key_id)
    if not key_alias or not creator_idurl:
        lg.warn('wrong key_id')
        result.errback(Exception('wrong key_id'))
        return result
    key_object = my_keys.known_keys().get(key_id)
    if key_object is None:
        lg.warn('unknown key: "%s"' % key_id)
        result.errback(Exception('unknown key: "%s"' % key_id))
        return result
    try:
        key_json = my_keys.make_key_info(key_object, key_id=key_id, include_private=include_private)
    except Exception as exc:
        lg.exc()
        result.errback(exc)
        return result
    key_data = json.dumps(key_json)
    block = encrypted.Block(
        BackupID=key_id,
        Data=key_data,
        SessionKey=key.NewSessionKey(),
        # encrypt data using public key of recipient
        EncryptKey=lambda inp: recipient_id_obj.encrypt(inp),
    )
    encrypted_key_data = block.Serialize()
    p2p_service.SendKey(
        remote_idurl=recipient_id_obj.getIDURL(),
        encrypted_key_data=encrypted_key_data,
        packet_id=key_id,
        callbacks={
            # both Ack and Fail are routed into the same response handler
            commands.Ack(): lambda response, info: _on_transfer_key_response(
                response, info, key_id, result),
            commands.Fail(): lambda response, info: _on_transfer_key_response(
                response, info, key_id, result),
            # commands.Ack(): lambda response, info: result.callback(response),
            # commands.Fail(): lambda response, info: result.errback(Exception(response)),
            None: lambda pkt_out: _on_transfer_key_response(None, None, key_id, result),
        },
        timeout=timeout,
    )
    return result
def doStartListening(self, *args, **kwargs):
    """
    Action method.

    Store the proxy router endpoint in config, preserve my original
    identity source, hook inbox and contact-status callbacks, and attach
    a disconnect callback to the active router transport session.
    """
    try:
        _, info = args[0]
        self.router_proto_host = (info.proto, info.host)
    except:
        # no transport info in the event - fall back to the persisted value
        try:
            s = config.conf().getString(
                'services/proxy-transport/current-router').strip()
            _, router_proto, router_host = s.split(' ')
            self.router_proto_host = (router_proto, strng.to_bin(router_host), )
        except:
            lg.exc()
    self.router_identity = identitycache.FromCache(self.router_idurl)
    config.conf().setString(
        'services/proxy-transport/current-router', '%s %s %s' % (
            strng.to_text(self.router_idurl),
            strng.to_text(self.router_proto_host[0]),
            strng.to_text(self.router_proto_host[1]), ))
    current_identity = my_id.getLocalIdentity().serialize(as_text=True)
    previous_identity = ReadMyOriginalIdentitySource()
    if previous_identity:
        # keep the earliest stored original identity - never overwrite it
        lg.warn('my original identity is not empty, SKIP overwriting')
        lg.out(2, '\nPREVIOUS ORIGINAL IDENTITY:\n%s\n' % current_identity)
    else:
        WriteMyOriginalIdentitySource(current_identity)
        lg.warn('current identity was stored as my-original-identity')
    self.request_service_packet_id = []
    callback.insert_inbox_callback(0, self._on_inbox_packet_received)
    if contact_status.isKnown(self.router_idurl):
        # track router availability transitions
        contact_status.A(self.router_idurl).addStateChangedCallback(
            self._on_router_contact_status_connected, newstate='CONNECTED')
        contact_status.A(self.router_idurl).addStateChangedCallback(
            self._on_router_contact_status_offline, newstate='OFFLINE')
    active_router_sessions = gateway.find_active_session(info.proto, info.host)
    if active_router_sessions:
        self.router_connection_info = {
            'id': active_router_sessions[0].id,
            'index': active_router_sessions[0].index,
            'proto': info.proto,
            'host': info.host,
            'idurl': self.router_idurl,
            'global_id': global_id.UrlToGlobalID(self.router_idurl),
        }
        active_router_session_machine = automat.objects().get(
            self.router_connection_info['index'], None)
        if active_router_session_machine:
            # get notified when the router session drops out of CONNECTED
            active_router_session_machine.addStateChangedCallback(
                self._on_router_session_disconnected, oldstate='CONNECTED')
            lg.info(
                'connected to proxy router and set active session: %s' % self.router_connection_info)
        else:
            lg.err('not found proxy router session state machine: %s' % self.router_connection_info['index'])
    else:
        lg.err(
            'active connection with proxy router at %s:%s was not found' % (
                info.proto, info.host, ))
    if _Debug:
        lg.out(
            2,
            'proxy_receiver.doStartListening !!!!!!! router: %s at %s://%s' % (self.router_idurl, self.router_proto_host[0], self.router_proto_host[1]))
def _on_router_session_disconnected(self, oldstate, newstate, event_string, args):
    """
    State-change callback: the proxy router transport session left the
    CONNECTED state - notify the state machine about it.
    """
    transition = '%s->%s' % (oldstate, newstate)
    lg.warn('router session disconnected: ' + transition)
    self.automat('router-disconnected')
def close_inbox_file(self, file_id):
    """
    Close and forget the incoming TCP file registered under `file_id`.

    Logs a warning when no such file is known.
    """
    # FIX: original code used `if self.inboxFiles.get(file_id):` - the
    # truthiness of the stored object (not its presence) decided the branch,
    # so a present-but-falsy entry would be leaked and mis-reported.
    # Use pop() so membership alone decides, with a single dict lookup.
    fin = self.inboxFiles.pop(file_id, None)
    if fin is not None:
        fin.close()
    else:
        lg.warn('incoming TCP file %s not exist' % file_id)