def request(self, json_payload, newpacket, info):
    """
    Handle an incoming broadcast-routing service request.

    Expects ``json_payload['action']`` to be either 'route' (sender wants to
    become a connected broadcaster) or 'listen' (sender wants to subscribe as
    a listener). Replies with Ack/Fail via ``p2p_service``.
    """
    from logs import lg
    from p2p import p2p_service
    from main import settings
    try:
        mode = json_payload['action']
    except:
        lg.exc()
        return p2p_service.SendFail(newpacket, 'invalid json payload')
    if mode != 'route' and mode != 'listen':
        lg.out(8, "service_broadcasting.request DENIED, wrong mode provided : %s" % mode)
        return p2p_service.SendFail(newpacket, 'invalid request')
    if not settings.enableBroadcastRouting():
        lg.out(8, "service_broadcasting.request DENIED, broadcast routing disabled")
        return p2p_service.SendFail(newpacket, 'broadcast routing disabled')
    from broadcast import broadcaster_node
    if not broadcaster_node.A():
        lg.out(8, "service_broadcasting.request DENIED, broadcast routing disabled")
        return p2p_service.SendFail(newpacket, 'broadcast routing disabled')
    if broadcaster_node.A().state not in ['BROADCASTING', 'OFFLINE', 'BROADCASTERS?', ]:
        lg.out(8, "service_broadcasting.request DENIED, current state is : %s" % broadcaster_node.A().state)
        return p2p_service.SendFail(newpacket, 'currently not broadcasting')
    if mode == 'route':
        broadcaster_node.A('new-broadcaster-connected', newpacket.OwnerID)
        # BUGFIX: original logged undefined name `words` (its defining line was
        # commented out), raising NameError here; log the mode instead
        lg.out(8, "service_broadcasting.request ACCEPTED, mode: %s" % mode)
        return p2p_service.SendAck(newpacket, 'accepted')
    if mode == 'listen':
        # BUGFIX: original built the listener scope from undefined `words[2:]`;
        # take it from the json payload now that requests are JSON-encoded.
        # NOTE(review): assumes the sender puts it under 'scope' — TODO confirm
        # against the requesting side of this protocol
        broadcaster_node.A().add_listener(newpacket.OwnerID, json_payload.get('scope', ''))
        lg.out(8, "service_broadcasting.request ACCEPTED, mode: %s" % mode)
        return p2p_service.SendAck(newpacket, 'accepted')
    return p2p_service.SendAck(newpacket, 'bad request')
def _on_incoming_suppliers_list(self, inp):
    """
    Process a suppliers-list transaction sent by another supplier of the same
    customer family.

    Adopts the remote family revision when it is same-age or newer; otherwise
    rejects transactions that would remove this node from the family or move
    it to a different position, replying with our own info so the sender can
    resync.
    """
    # this packet came from another supplier who belongs to that family also
    incoming_packet = inp['packet']
    if _Debug:
        lg.out(_DebugLevel, 'family_member._on_incoming_suppliers_list with %s' % incoming_packet)
    if not self.my_info:
        # own DHT record is not loaded yet - nothing to compare against, just acknowledge
        if _Debug:
            lg.out(_DebugLevel, ' current DHT info is not yet known, skip')
        return p2p_service.SendAck(incoming_packet)
    try:
        another_ecc_map = inp['customer_ecc_map']
        another_suppliers_list = inp['suppliers_list']
        another_revision = int(inp['transaction_revision'])
    except:
        lg.exc()
        # malformed transaction: send our own info back so the sender can resync
        return p2p_service.SendFail(incoming_packet, response=serialization.DictToBytes(self.my_info))
    if _Debug:
        lg.out(_DebugLevel, ' another_revision=%d another_ecc_map=%s another_suppliers_list=%r' % (
            another_revision, another_ecc_map, another_suppliers_list))
    if another_revision >= int(self.my_info['revision']):
        # remote revision is same-age or newer - accept it as the new local truth
        self.my_info = self._do_create_revision_from_another_supplier(another_revision, another_suppliers_list, another_ecc_map)
        lg.info('another supplier have more fresh revision, update my info and raise "family-refresh" event')
        self.automat('family-refresh')
        return p2p_service.SendAck(incoming_packet)
    # remote revision is older than ours: validate it does not harm our membership
    if my_id.getLocalIDURL() not in another_suppliers_list:
        lg.warn('another supplier is trying to remove my IDURL from the family of customer %s' % self.customer_idurl)
        return p2p_service.SendFail(incoming_packet, response=serialization.DictToBytes(self.my_info))
    my_position_in_transaction = another_suppliers_list.index(my_id.getLocalIDURL())
    my_known_position = self.my_info['suppliers'].index(my_id.getLocalIDURL())
    if my_position_in_transaction != my_known_position:
        lg.warn('another supplier is trying to put my IDURL on another position in the family of customer %s' % self.customer_idurl)
        return p2p_service.SendFail(incoming_packet, response=serialization.DictToBytes(self.my_info))
    return p2p_service.SendAck(incoming_packet)
def on_audit_key_received(newpacket, info, status, error_message):
    """
    Callback executed when a remote user wants to verify that this node really
    possesses a given key.

    The payload carries base64-encoded "audit" samples: a public sample to be
    encrypted with the key, and/or a private sample to be decrypted with it.
    The transformed sample is sent back in the Ack so the remote side can
    verify the result. Returns True when an audit response was sent.
    """
    block = encrypted.Unserialize(newpacket.Payload)
    if block is None:
        lg.out(2, 'key_ring.on_audit_key_received ERROR reading data from %s' % newpacket.RemoteID)
        return False
    try:
        raw_payload = block.Data()
        json_payload = serialization.BytesToDict(raw_payload, keys_to_text=True, values_to_text=True)
        key_id = json_payload['key_id']
        # bare subscript is a deliberate presence check: raises KeyError into the
        # handler below if the 'audit' section is missing
        json_payload['audit']
        public_sample = base64.b64decode(json_payload['audit']['public_sample'])
        private_sample = base64.b64decode(json_payload['audit']['private_sample'])
    except Exception as exc:
        lg.exc()
        p2p_service.SendFail(newpacket, str(exc))
        return False
    if not my_keys.is_valid_key_id(key_id):
        p2p_service.SendFail(newpacket, 'invalid key id')
        return False
    if not my_keys.is_key_registered(key_id, include_master=True):
        p2p_service.SendFail(newpacket, 'key not registered')
        return False
    if public_sample:
        # prove ownership of the public part: encrypt the sample and return it
        response_payload = base64.b64encode(my_keys.encrypt(key_id, public_sample))
        p2p_service.SendAck(newpacket, response_payload)
        if _Debug:
            lg.info('remote user %s requested audit of public key %s' % (newpacket.OwnerID, key_id))
        return True
    if private_sample:
        # prove ownership of the private part: decrypt the sample and return it
        if not my_keys.is_key_private(key_id):
            p2p_service.SendFail(newpacket, 'private key not registered')
            return False
        response_payload = base64.b64encode(my_keys.decrypt(key_id, private_sample))
        p2p_service.SendAck(newpacket, response_payload)
        if _Debug:
            lg.info('remote user %s requested audit of private key %s' % (newpacket.OwnerID, key_id))
        return True
    # neither sample present - nothing to audit
    p2p_service.SendFail(newpacket, 'wrong audit request')
    return False
def on_delete_file(newpacket):
    """
    Erase files/folders stored for a customer, as listed in a DeleteFile packet.

    The packet Payload is a newline-separated list of packet IDs (falls back to
    the packet's own PacketID when the payload is empty). Individual remove
    failures are logged and skipped (best-effort); a malformed path or unknown
    customer aborts with a Fail reply.
    """
    # TODO: call verify_packet_ownership()
    if not newpacket.Payload:
        ids = [newpacket.PacketID, ]
    else:
        ids = strng.to_text(newpacket.Payload).split('\n')
    filescount = 0
    dirscount = 0
    lg.warn('going to erase files: %s' % ids)
    customer_id = global_id.UrlToGlobalID(newpacket.OwnerID)
    for pcktID in ids:
        glob_path = global_id.ParseGlobalID(pcktID)
        if not glob_path['customer']:
            # relative packet ID - qualify it with the sender's customer ID
            glob_path = global_id.ParseGlobalID(customer_id + ':' + pcktID)
        if not glob_path['path']:
            lg.err("got incorrect PacketID")
            p2p_service.SendFail(newpacket, 'incorrect path')
            return False
        if customer_id != glob_path['customer']:
            lg.warn('trying to delete file stored for another cusomer')
            continue
        # TODO: add validation of customerGlobID
        # TODO: process requests from another customer
        filename = make_valid_filename(newpacket.OwnerID, glob_path)
        if not filename:
            lg.warn("got empty filename, bad customer or wrong packetID?")
            p2p_service.SendFail(newpacket, 'not a customer, or file not found')
            return False
        if os.path.isfile(filename):
            try:
                os.remove(filename)
                filescount += 1
            except:
                # best-effort: log and continue with the remaining IDs
                lg.exc()
        elif os.path.isdir(filename):
            try:
                bpio._dir_remove(filename)
                dirscount += 1
            except:
                lg.exc()
        else:
            lg.warn("path not found %s" % filename)
    # if self.publish_event_supplier_file_modified:
    #     events.send('supplier-file-modified', data=dict(
    #         action='delete',
    #         glob_path=glob_path['path'],
    #         owner_id=newpacket.OwnerID,
    #     ))
    if _Debug:
        lg.dbg(_DebugLevel, "from [%s] with %d IDs, %d files and %d folders were removed" % (
            newpacket.OwnerID, len(ids), filescount, dirscount))
    p2p_service.SendAck(newpacket)
    return True
def request(self, json_payload, newpacket, info):
    """
    Handle an incoming accountant service request.

    Only the 'join' action is currently wired up: it notifies the
    accountant_node state machine about a newly connected accountant.
    'write' and 'read' are recognized but not yet implemented.
    """
    from logs import lg
    from p2p import p2p_service
    try:
        mode = json_payload['action']
    except:
        lg.exc()
        return p2p_service.SendFail(newpacket, "invalid json payload")
    if mode not in ('join', 'write', 'read'):
        lg.out(8, "service_accountant.request DENIED, wrong mode provided : %s" % mode)
        return p2p_service.SendFail(newpacket, "invalid request")
    from coins import accountant_node
    if not accountant_node.A():
        lg.out(8, "service_accountant.request DENIED, accountant_node() state machine not exist")
        return p2p_service.SendFail(newpacket, "accountant_node service not started")
    if mode != 'join':
        # 'write' / 'read' are not handled yet
        return p2p_service.SendFail(newpacket, 'bad request')
    accountant_node.A('accountant-connected', newpacket.OwnerID)
    return p2p_service.SendAck(newpacket, 'accepted')
def _on_inbox_packet_received(self, newpacket, info, status, error_message):
    """
    Inbox hook for the backups service: consume Files() packets coming from
    my own suppliers and feed them into backup_control.

    Returns True when the packet was a Files() addressed to me (whether the
    import succeeded or not - an Ack/Fail reply is sent either way), False
    for anything else so other handlers can process it.
    """
    from logs import lg
    from contacts import contactsdb
    from userid import my_id
    from userid import global_id
    from storage import backup_control
    from p2p import commands
    from p2p import p2p_service
    if newpacket.Command == commands.Files():
        list_files_global_id = global_id.ParseGlobalID(newpacket.PacketID)
        if not list_files_global_id['idurl']:
            lg.warn('invalid PacketID: %s' % newpacket.PacketID)
            return False
        if list_files_global_id['idurl'] != my_id.getLocalIDURL():
            # lg.warn('skip %s which is from another customer' % newpacket)
            return False
        if not contactsdb.is_supplier(newpacket.OwnerID):
            lg.warn('%s came, but %s is not my supplier' % (newpacket, newpacket.OwnerID, ))
            # skip Files() if this is not my supplier
            return False
        lg.out(self.debug_level, "service_backups._on_inbox_packet_received: %r for us from %s at %s" % (
            newpacket, newpacket.CreatorID, info))
        if backup_control.IncomingSupplierListFiles(newpacket, list_files_global_id):
            # send ack packet back
            p2p_service.SendAck(newpacket)
        else:
            p2p_service.SendFail(newpacket)
        return True
    return False
def on_private_key_received(newpacket, info, status, error_message):
    """
    Register a private key delivered by a remote user.

    Unserializes the encrypted payload, extracts 'key_id' and 'private' from
    its JSON body and registers the key locally. Already-registered keys are
    treated as success. An Ack is sent only after a fresh registration.
    """
    decrypted_block = encrypted.Unserialize(newpacket.Payload)
    if decrypted_block is None:
        lg.out(2, 'key_ring.received_private_key ERROR reading data from %s' % newpacket.RemoteID)
        return False
    try:
        key_json = json.loads(decrypted_block.Data())
        # key_alias = key_json['alias']
        # key_creator = key_json['creator']
        # key_owner = key_json['owner']
        key_id = str(key_json['key_id'])
        private_key_string = str(key_json['private'])
    except:
        lg.exc()
        return False
    if my_keys.is_key_registered(key_id):
        lg.warn('key "%s" already registered' % key_id)
        return True
    if not my_keys.register_key(key_id, private_key_string):
        return False
    p2p_service.SendAck(newpacket)
    return True
def on_files_received(newpacket, info):
    """
    Consume a Files() packet coming from one of my own suppliers.

    Ignores packets owned by other customers or sent by nodes that are not
    my suppliers. Feeds accepted packets to IncomingSupplierListFiles and
    replies with Ack on success or Fail otherwise.
    """
    parsed_packet_id = global_id.ParseGlobalID(newpacket.PacketID)
    if not parsed_packet_id['idurl']:
        lg.warn('invalid PacketID: %s' % newpacket.PacketID)
        return False
    if parsed_packet_id['idurl'] != my_id.getLocalID():
        # ignore Files() if this is another customer
        if _Debug:
            lg.dbg(_DebugLevel, 'ignore incoming %r which is owned by another customer' % newpacket)
        return False
    if not contactsdb.is_supplier(newpacket.OwnerID):
        # ignore Files() if this is not my supplier
        if _Debug:
            lg.dbg(_DebugLevel, 'incoming %r received, but %r is not my supplier' % (newpacket, newpacket.OwnerID, ))
        return False
    if _Debug:
        lg.args(_DebugLevel, "service_backups._on_inbox_packet_received: %r for us from %s at %s" % (
            newpacket, newpacket.CreatorID, info))
    if not IncomingSupplierListFiles(newpacket, parsed_packet_id):
        p2p_service.SendFail(newpacket)
    else:
        p2p_service.SendAck(newpacket)
    return True
def cancel(self, json_payload, newpacket, info): from twisted.internet import reactor # @UnresolvedImport from logs import lg from main import events from p2p import p2p_service from contacts import contactsdb from storage import accounting customer_idurl = newpacket.OwnerID if not contactsdb.is_customer(customer_idurl): lg.warn("got packet from %s, but he is not a customer" % customer_idurl) return p2p_service.SendFail(newpacket, 'not a customer') if accounting.check_create_customers_quotas(): lg.out(6, 'service_supplier.cancel created a new space file') space_dict = accounting.read_customers_quotas() if customer_idurl not in list(space_dict.keys()): lg.warn("got packet from %s, but not found him in space dictionary" % customer_idurl) return p2p_service.SendFail(newpacket, 'not a customer') try: free_bytes = int(space_dict[b'free']) space_dict[b'free'] = free_bytes + int(space_dict[customer_idurl]) except: lg.exc() return p2p_service.SendFail(newpacket, 'broken space file') new_customers = list(contactsdb.customers()) new_customers.remove(customer_idurl) contactsdb.update_customers(new_customers) contactsdb.remove_customer_meta_info(customer_idurl) contactsdb.save_customers() space_dict.pop(customer_idurl) accounting.write_customers_quotas(space_dict) from supplier import local_tester reactor.callLater(0, local_tester.TestUpdateCustomers) # @UndefinedVariable lg.out(8, " OLD CUSTOMER: TERMINATED !!!!!!!!!!!!!!") events.send('existing-customer-terminated', dict(idurl=customer_idurl)) return p2p_service.SendAck(newpacket, 'accepted')
def cancel(self, request, info):
    """
    Terminate the supplier relation with the customer who sent this request.

    Older variant of the cancel handler: releases the customer's quota back
    into the 'free' pool, drops the customer from contactsdb and schedules a
    local data re-scan. Replies with Ack('accepted') on success.
    """
    from main import events
    from p2p import p2p_service
    if not contactsdb.is_customer(request.OwnerID):
        lg.warn("got packet from %s, but he is not a customer" % request.OwnerID)
        return p2p_service.SendFail(request, 'not a customer')
    if accounting.check_create_customers_quotas():
        lg.out(6, 'service_supplier.cancel created a new space file')
    space_dict = accounting.read_customers_quotas()
    if request.OwnerID not in space_dict.keys():
        lg.warn("got packet from %s, but not found him in space dictionary" % request.OwnerID)
        return p2p_service.SendFail(request, 'not a customer')
    try:
        # NOTE(review): this variant uses the text key 'free' while a sibling
        # handler uses b'free' - confirm which key type the space file really has
        free_bytes = int(space_dict['free'])
        space_dict['free'] = free_bytes + int(space_dict[request.OwnerID])
    except:
        lg.exc()
        return p2p_service.SendFail(request, 'broken space file')
    new_customers = list(contactsdb.customers())
    new_customers.remove(request.OwnerID)
    contactsdb.update_customers(new_customers)
    contactsdb.save_customers()
    space_dict.pop(request.OwnerID)
    accounting.write_customers_quotas(space_dict)
    from supplier import local_tester
    # re-validate locally stored data now that this customer is gone
    reactor.callLater(0, local_tester.TestUpdateCustomers)
    lg.out(8, " OLD CUSTOMER: TERMINATED !!!!!!!!!!!!!!")
    events.send('existing-customer-terminated', dict(idurl=request.OwnerID))
    return p2p_service.SendAck(request, 'accepted')
def on_incoming_message(request, info, status, error_message):
    """
    Message came in for us.

    Deserializes a PrivateMessage from the request payload, decrypts it,
    de-duplicates by PacketID, sends an Ack and dispatches the message to all
    registered incoming-message callbacks. Returns True when the message was
    accepted and dispatched.
    """
    global _IncomingMessageCallbacks
    lg.out(6, "message.Message from " + str(request.OwnerID))
    private_message_object = PrivateMessage.deserialize(request.Payload)
    if private_message_object is None:
        lg.warn("PrivateMessage deserialize failed, can not extract message from request payload of %d bytes" % len(request.Payload))
        # BUGFIX: original fell through here and called .decrypt() on None,
        # relying on the broad except below to bail out with a spurious traceback
        return False
    try:
        decrypted_message = private_message_object.decrypt()
    except:
        lg.exc()
        return False
    # skip messages we have already processed
    for known_id in received_messages_ids():
        if known_id == request.PacketID:
            lg.out(6, "message.Message SKIP, message %s found in history" % known_id)
            return False
    received_messages_ids().add(request.PacketID)
    from p2p import p2p_service
    p2p_service.SendAck(request)
    try:
        for cb in _IncomingMessageCallbacks:
            cb(request, private_message_object, decrypted_message)
    except:
        # a broken consumer callback must not make delivery fail
        lg.exc()
    return True
def _on_delete_file(self, newpacket):
    """
    Erase files/folders stored for a customer, as listed in a DeleteFile packet.

    The packet Payload is a newline-separated list of packet IDs (falls back
    to the packet's own PacketID when empty). Per-file remove failures are
    logged and skipped; a malformed path or unknown customer aborts with a
    Fail reply. Optionally publishes a 'supplier-file-modified' event per ID.
    """
    import os
    from logs import lg
    from system import bpio
    from lib import strng
    from userid import global_id
    from p2p import p2p_service
    from main import events
    if not newpacket.Payload:
        ids = [newpacket.PacketID, ]
    else:
        ids = strng.to_text(newpacket.Payload).split('\n')
    filescount = 0
    dirscount = 0
    lg.warn('going to erase files: %s' % ids)
    customer_id = global_id.UrlToGlobalID(newpacket.OwnerID)
    for pcktID in ids:
        glob_path = global_id.ParseGlobalID(pcktID)
        if not glob_path['customer']:
            # relative packet ID - qualify it with the sender's customer ID
            glob_path = global_id.ParseGlobalID(customer_id + ':' + pcktID)
        if not glob_path['path']:
            lg.err("got incorrect PacketID")
            p2p_service.SendFail(newpacket, 'incorrect path')
            return False
        if customer_id != glob_path['customer']:
            lg.warn('trying to delete file stored for another cusomer')
            continue
        # TODO: add validation of customerGlobID
        # TODO: process requests from another customer
        filename = self._do_make_valid_filename(newpacket.OwnerID, glob_path)
        if not filename:
            lg.warn("got empty filename, bad customer or wrong packetID?")
            p2p_service.SendFail(newpacket, 'not a customer, or file not found')
            return False
        if os.path.isfile(filename):
            try:
                os.remove(filename)
                filescount += 1
            except:
                # best-effort: log and continue with the remaining IDs
                lg.exc()
        elif os.path.isdir(filename):
            try:
                bpio._dir_remove(filename)
                dirscount += 1
            except:
                lg.exc()
        else:
            lg.warn("path not found %s" % filename)
        if self.publish_event_supplier_file_modified:
            events.send('supplier-file-modified', data=dict(
                action='delete',
                glob_path=glob_path['path'],
                owner_id=newpacket.OwnerID,
            ))
    lg.out(self.debug_level, "service_supplier._on_delete_file from [%s] with %d IDs, %d files and %d folders were removed" % (
        newpacket.OwnerID, len(ids), filescount, dirscount))
    p2p_service.SendAck(newpacket)
    return True
def cancel(self, json_payload, newpacket, info):
    """
    Close the DHT customer-supplier relation for the sender of this packet.

    A warning is logged when the sender is not a known customer, but the
    relation is closed and the request acknowledged either way.
    """
    from logs import lg
    from contacts import contactsdb
    from p2p import p2p_service
    from dht import dht_relations
    requester_idurl = newpacket.OwnerID
    if not contactsdb.is_customer(requester_idurl):
        lg.warn("got packet from %s, but he is not a customer" % requester_idurl)
    dht_relations.close_customer_supplier_relation(requester_idurl)
    return p2p_service.SendAck(newpacket, 'accepted')
def cancel(self, json_payload, newpacket, info): from twisted.internet import reactor # @UnresolvedImport from logs import lg from main import events from p2p import p2p_service from contacts import contactsdb from storage import accounting from crypt import my_keys customer_idurl = newpacket.OwnerID try: customer_public_key = json_payload['customer_public_key'] customer_public_key_id = customer_public_key['key_id'] except: customer_public_key = None customer_public_key_id = None customer_ecc_map = json_payload.get('ecc_map') if not contactsdb.is_customer(customer_idurl): lg.warn("got packet from %s, but he is not a customer" % customer_idurl) return p2p_service.SendFail(newpacket, 'not a customer') if accounting.check_create_customers_quotas(): lg.info('created a new space file') space_dict, free_space = accounting.read_customers_quotas() if customer_idurl.to_bin() not in list(space_dict.keys()): lg.warn( "got packet from %s, but not found him in space dictionary" % customer_idurl) return p2p_service.SendFail(newpacket, 'not a customer') try: free_bytes = int(free_space) free_space = free_bytes + int(space_dict[customer_idurl.to_bin()]) except: lg.exc() return p2p_service.SendFail(newpacket, 'broken space file') new_customers = list(contactsdb.customers()) new_customers.remove(customer_idurl) space_dict.pop(customer_idurl.to_bin()) accounting.write_customers_quotas(space_dict, free_space) contactsdb.remove_customer_meta_info(customer_idurl) contactsdb.update_customers(new_customers) contactsdb.save_customers() if customer_public_key_id: my_keys.erase_key(customer_public_key_id) # TODO: erase customer's groups keys also from supplier import local_tester reactor.callLater( 0, local_tester.TestUpdateCustomers) # @UndefinedVariable lg.info("OLD CUSTOMER TERMINATED %r" % customer_idurl) events.send('existing-customer-terminated', data=dict(idurl=customer_idurl, ecc_map=customer_ecc_map)) return p2p_service.SendAck(newpacket, 'accepted')
def _on_incoming_supplier_position(self, inp):
    """
    Store family meta info (ecc map, my position, family snapshot) sent by the
    customer who owns this family.

    Fails the request when the embedded supplier IDURL is not mine; otherwise
    records the meta info in contactsdb and acknowledges.
    """
    # this packet came from the customer, a godfather of the family ;)))
    incoming_packet = inp['packet']
    try:
        ecc_map = inp['customer_ecc_map']
        supplier_idurl = inp['supplier_idurl']
        supplier_position = inp['supplier_position']
        family_snapshot = inp.get('family_snapshot') or []
    except:
        lg.exc()
        return None
    if supplier_idurl != my_id.getLocalIDURL():
        return p2p_service.SendFail(incoming_packet, 'contacts packet with supplier position not addressed to me')
    try:
        # NOTE(review): _existing_position is computed but never used below -
        # looks like a leftover; confirm before removing
        _existing_position = self.my_info['suppliers'].index(supplier_idurl)
    except:
        _existing_position = -1
    contactsdb.add_customer_meta_info(self.customer_idurl, {
        'ecc_map': ecc_map,
        'position': supplier_position,
        'family_snapshot': family_snapshot,
    })
    if _Debug:
        lg.out(_DebugLevel, 'family_member._on_incoming_supplier_position stored new meta info for customer %s:\n' % self.customer_idurl)
        lg.out(_DebugLevel, ' ecc_map=%s position=%s family_snapshot=%s' % (
            ecc_map, supplier_position, family_snapshot, ))
    return p2p_service.SendAck(incoming_packet)
def _on_files_received(self, newpacket, info):
    """
    Consume a Files() packet addressed to a key I was granted access to.

    Two cases, distinguished by the block's CreatorID:
      * the trusted customer himself sent me his catalog of shared files -
        import it into backup_fs and fire 'shared-list-files-received';
      * an external supplier of that customer sent the raw list of packets he
        stores - record it in backup_matrix and fix the supplier's position
        in contactsdb if needed.
    Returns True when the packet was fully processed and acknowledged.
    """
    from logs import lg
    from lib import serialization
    from main import settings
    from main import events
    from p2p import p2p_service
    from storage import backup_fs
    from storage import backup_control
    from crypt import encrypted
    from crypt import my_keys
    from userid import my_id
    from userid import global_id
    from storage import backup_matrix
    from supplier import list_files
    from contacts import contactsdb
    list_files_global_id = global_id.ParseGlobalID(newpacket.PacketID)
    if not list_files_global_id['idurl']:
        lg.warn('invalid PacketID: %s' % newpacket.PacketID)
        return False
    trusted_customer_idurl = list_files_global_id['idurl']
    incoming_key_id = list_files_global_id['key_id']
    if trusted_customer_idurl == my_id.getGlobalID():
        lg.warn('skip %s packet which seems to came from my own supplier' % newpacket)
        # only process list Files() from other users who granted me access
        return False
    if not my_keys.is_valid_key_id(incoming_key_id):
        lg.warn('ignore, invalid key id in packet %s' % newpacket)
        return False
    if not my_keys.is_key_private(incoming_key_id):
        lg.warn('private key is not registered : %s' % incoming_key_id)
        p2p_service.SendFail(newpacket, 'private key is not registered')
        return False
    try:
        block = encrypted.Unserialize(
            newpacket.Payload,
            decrypt_key=incoming_key_id,
        )
    except:
        lg.exc(newpacket.Payload)
        return False
    if block is None:
        lg.warn('failed reading data from %s' % newpacket.RemoteID)
        return False
    # if block.CreatorID != trusted_customer_idurl:
    #     lg.warn('invalid packet, creator ID must be present in packet ID : %s ~ %s' % (
    #         block.CreatorID, list_files_global_id['idurl'], ))
    #     return False
    try:
        raw_files = block.Data()
    except:
        lg.exc()
        return False
    if block.CreatorID == trusted_customer_idurl:
        # this is a trusted guy sending some shared files to me
        try:
            json_data = serialization.BytesToDict(raw_files, keys_to_text=True)
            # bare subscript is a deliberate presence check: raises KeyError
            # into the handler below when 'items' is missing
            json_data['items']
        except:
            lg.exc()
            return False
        count = backup_fs.Unserialize(
            raw_data=json_data,
            iter=backup_fs.fs(trusted_customer_idurl),
            iterID=backup_fs.fsID(trusted_customer_idurl),
            from_json=True,
        )
        p2p_service.SendAck(newpacket)
        events.send('shared-list-files-received', dict(
            customer_idurl=trusted_customer_idurl,
            new_items=count,
        ))
        if count == 0:
            lg.warn('no files were imported during file sharing')
        else:
            backup_control.Save()
            lg.info('imported %d shared files from %s, key_id=%s' % (
                count, trusted_customer_idurl, incoming_key_id, ))
        return True
    # otherwise this must be an external supplier sending us a files he stores for trusted customer
    external_supplier_idurl = block.CreatorID
    try:
        supplier_raw_list_files = list_files.UnpackListFiles(raw_files, settings.ListFilesFormat())
        backup_matrix.SaveLatestRawListFiles(
            supplier_idurl=external_supplier_idurl,
            raw_data=supplier_raw_list_files,
            customer_idurl=trusted_customer_idurl,
        )
    except:
        lg.exc()
        return False
    # need to detect supplier position from the list of packets
    # and place that supplier on the correct position in contactsdb
    real_supplier_pos = backup_matrix.DetectSupplierPosition(supplier_raw_list_files)
    known_supplier_pos = contactsdb.supplier_position(external_supplier_idurl, trusted_customer_idurl)
    if real_supplier_pos >= 0:
        if known_supplier_pos >= 0 and known_supplier_pos != real_supplier_pos:
            lg.warn('external supplier %s position is not matching to list files, rewriting for customer %s' % (
                external_supplier_idurl, trusted_customer_idurl))
            contactsdb.erase_supplier(
                idurl=external_supplier_idurl,
                customer_idurl=trusted_customer_idurl,
            )
        contactsdb.add_supplier(
            idurl=external_supplier_idurl,
            position=real_supplier_pos,
            customer_idurl=trusted_customer_idurl,
        )
        contactsdb.save_suppliers(customer_idurl=trusted_customer_idurl)
    else:
        lg.warn('not possible to detect external supplier position for customer %s' % trusted_customer_idurl)
    # finally send ack packet back
    p2p_service.SendAck(newpacket)
    lg.info('received list of packets from external supplier %s for customer %s' % (
        external_supplier_idurl, trusted_customer_idurl))
    return True
def request(self, json_payload, newpacket, info):
    """
    Handle an incoming storage service request from a customer.

    Two flows, distinguished by 'key_id' in the payload:
      * shared-data access: an external user asks for access to data stored
        here for one of my customers - validate the key and accept/deny;
      * storage allocation: a new or returning customer asks for
        'needed_bytes' of space - update quotas, contactsdb and the
        customer's public key, then accept or deny based on free space.
    """
    from twisted.internet import reactor  # @UnresolvedImport
    from logs import lg
    from main import events
    from crypt import my_keys
    from p2p import p2p_service
    from contacts import contactsdb
    from storage import accounting
    from userid import global_id
    customer_idurl = newpacket.OwnerID
    customer_id = global_id.UrlToGlobalID(customer_idurl)
    bytes_for_customer = 0
    try:
        bytes_for_customer = int(json_payload['needed_bytes'])
    except:
        # BUGFIX: original was `"wrong payload" % newpacket.Payload` - a format
        # expression with no placeholder, raising TypeError inside this handler;
        # fixed to match the sibling message used further below
        lg.warn("wrong payload : %s" % newpacket.Payload)
        return p2p_service.SendFail(newpacket, 'wrong payload')
    try:
        customer_public_key = json_payload['customer_public_key']
        customer_public_key_id = customer_public_key['key_id']
    except:
        # public key info is optional in the request
        customer_public_key = None
        customer_public_key_id = None
    data_owner_idurl = None
    target_customer_idurl = None
    family_position = json_payload.get('position')
    ecc_map = json_payload.get('ecc_map')
    family_snapshot = json_payload.get('family_snapshot')
    key_id = json_payload.get('key_id')
    target_customer_id = json_payload.get('customer_id')
    if key_id:
        # this is a request from external user to access shared data stored by one of my customers
        # this is "second" customer requesting data from "first" customer
        if not key_id or not my_keys.is_valid_key_id(key_id):
            lg.warn('missed or invalid key id')
            return p2p_service.SendFail(newpacket, 'invalid key id')
        target_customer_idurl = global_id.GlobalUserToIDURL(target_customer_id)
        if not contactsdb.is_customer(target_customer_idurl):
            lg.warn("target user %s is not a customer" % target_customer_id)
            return p2p_service.SendFail(newpacket, 'not a customer')
        if target_customer_idurl == customer_idurl:
            lg.warn('customer %s requesting shared access to own files' % customer_idurl)
            return p2p_service.SendFail(newpacket, 'invalid case')
        if not my_keys.is_key_registered(key_id):
            lg.warn('key not registered: %s' % key_id)
            p2p_service.SendFail(newpacket, 'key not registered')
            return False
        data_owner_idurl = my_keys.split_key_id(key_id)[1]
        if data_owner_idurl != target_customer_idurl and data_owner_idurl != customer_idurl:
            # pretty complex scenario:
            # external customer requesting access to data which belongs not to that customer
            # this is "third" customer accessing data belongs to "second" customer
            # TODO: for now just stop it
            lg.warn('under construction, key_id=%s customer_idurl=%s target_customer_idurl=%s' % (
                key_id, customer_idurl, target_customer_idurl, ))
            p2p_service.SendFail(newpacket, 'under construction')
            return False
        # do not create connection with that customer, only accept the request
        lg.info('external customer %s requested access to shared data at %s' % (customer_id, key_id, ))
        return p2p_service.SendAck(newpacket, 'accepted')
    # key_id is not present in the request:
    # this is a request to connect new customer (or reconnect existing one) to that supplier
    if not bytes_for_customer or bytes_for_customer < 0:
        lg.warn("wrong payload : %s" % newpacket.Payload)
        return p2p_service.SendFail(newpacket, 'wrong storage value')
    current_customers = contactsdb.customers()
    if accounting.check_create_customers_quotas():
        lg.out(6, 'service_supplier.request created a new space file')
    space_dict = accounting.read_customers_quotas()
    try:
        free_bytes = int(space_dict[b'free'])
    except:
        lg.exc()
        return p2p_service.SendFail(newpacket, 'broken space file')
    # sanity: contactsdb and the space file must agree about this customer
    if (customer_idurl not in current_customers and customer_idurl in list(space_dict.keys())):
        lg.warn("broken space file")
        return p2p_service.SendFail(newpacket, 'broken space file')
    if (customer_idurl in current_customers and customer_idurl not in list(space_dict.keys())):
        lg.warn("broken customers file")
        return p2p_service.SendFail(newpacket, 'broken customers file')
    if customer_idurl in current_customers:
        # returning customer: release his old allocation before re-allocating
        free_bytes += int(space_dict.get(customer_idurl, 0))
        space_dict[b'free'] = free_bytes
        current_customers.remove(customer_idurl)
        space_dict.pop(customer_idurl)
        new_customer = False
    else:
        new_customer = True
    lg.out(8, ' new_customer=%s current_allocated_bytes=%s' % (
        new_customer, space_dict.get(customer_idurl), ))
    from supplier import local_tester
    if free_bytes <= bytes_for_customer:
        # not enough free space: deny and make sure the customer is fully removed
        contactsdb.update_customers(current_customers)
        contactsdb.remove_customer_meta_info(customer_idurl)
        contactsdb.save_customers()
        accounting.write_customers_quotas(space_dict)
        if customer_public_key_id:
            my_keys.erase_key(customer_public_key_id)
        reactor.callLater(0, local_tester.TestUpdateCustomers)  # @UndefinedVariable
        if new_customer:
            lg.out(8, " NEW CUSTOMER: DENIED !!!!!!!!!!! not enough space available")
            events.send('new-customer-denied', dict(idurl=customer_idurl))
        else:
            lg.out(8, " OLD CUSTOMER: DENIED !!!!!!!!!!! not enough space available")
            events.send('existing-customer-denied', dict(idurl=customer_idurl))
        return p2p_service.SendAck(newpacket, 'deny')
    # enough space: allocate and persist
    space_dict[b'free'] = free_bytes - bytes_for_customer
    current_customers.append(customer_idurl)
    space_dict[customer_idurl] = bytes_for_customer
    contactsdb.update_customers(current_customers)
    contactsdb.save_customers()
    contactsdb.add_customer_meta_info(customer_idurl, {
        'ecc_map': ecc_map,
        'position': family_position,
        'family_snapshot': family_snapshot,
    })
    accounting.write_customers_quotas(space_dict)
    if customer_public_key_id:
        # refresh the stored public key of that customer
        my_keys.erase_key(customer_public_key_id)
        try:
            if not my_keys.is_key_registered(customer_public_key_id):
                key_id, key_object = my_keys.read_key_info(customer_public_key)
                if not my_keys.register_key(key_id, key_object):
                    lg.err('failed to register customer public key')
        except:
            lg.exc()
    else:
        lg.warn('customer public key was not provided in the request')
    reactor.callLater(0, local_tester.TestUpdateCustomers)  # @UndefinedVariable
    if new_customer:
        lg.out(8, " NEW CUSTOMER: ACCEPTED %s family_position=%s ecc_map=%s allocated_bytes=%s" % (
            customer_idurl, family_position, ecc_map, bytes_for_customer))
        lg.out(8, " family_snapshot=%r !!!!!!!!!!!!!!" % family_snapshot, )
        events.send('new-customer-accepted', dict(
            idurl=customer_idurl,
            allocated_bytes=bytes_for_customer,
            ecc_map=ecc_map,
            position=family_position,
            family_snapshot=family_snapshot,
            key_id=customer_public_key_id,
        ))
    else:
        lg.out(8, " OLD CUSTOMER: ACCEPTED %s family_position=%s ecc_map=%s allocated_bytes=%s" % (
            customer_idurl, family_position, ecc_map, bytes_for_customer))
        lg.out(8, " family_snapshot=%r !!!!!!!!!!!!!!" % family_snapshot)
        events.send('existing-customer-accepted', dict(
            idurl=customer_idurl,
            allocated_bytes=bytes_for_customer,
            ecc_map=ecc_map,
            position=family_position,
            key_id=customer_public_key_id,
            family_snapshot=family_snapshot,
        ))
    return p2p_service.SendAck(newpacket, 'accepted')
def on_event_packet_received(newpacket, info, status, error_message):
    """
    Handle an incoming ``Event()`` packet.

    Decodes the JSON body and either consumes the event locally (firing it
    via ``events.send()`` and replying with an Ack) or pushes it into a
    locally-hosted queue when one matches the computed global queue ID.

    Returns True when the packet was an Event() and was handled (even on a
    push failure, which is reported back with a Fail packet), False when the
    packet is not an Event() or its payload could not be decoded.
    """
    if newpacket.Command != commands.Event():
        return False
    try:
        e_json = json.loads(newpacket.Payload)
        event_id = e_json['event_id']
        payload = e_json['payload']
        queue_id = e_json.get('queue_id')
        producer_id = e_json.get('producer_id')
        message_id = e_json.get('message_id')
        created = e_json.get('created')
    except Exception:
        # narrowed from a bare "except:" so KeyboardInterrupt/SystemExit
        # still propagate; also fixed the "invlid" typo in the log message
        lg.warn("invalid json payload")
        return False

    def _consume_locally():
        # attach the queue coordinates to the event body, fire the event
        # for local subscribers and acknowledge the sender
        payload.update(
            dict(
                queue_id=queue_id,
                producer_id=producer_id,
                message_id=message_id,
                created=created,
            ))
        events.send(event_id, data=payload)
        p2p_service.SendAck(newpacket)

    if queue_id and producer_id and message_id:
        # this message has an ID and producer so it came from a queue
        # and needs to be consumed here
        if _Debug:
            lg.warn('received event from the queue at %s' % queue_id)
        _consume_locally()
        return True
    # this message has neither ID nor producer so it came from another user
    # directly; try to find a queue for that event and see if we need to
    # publish it or not
    queue_id = global_id.MakeGlobalQueueID(
        queue_alias=event_id,
        owner_id=global_id.MakeGlobalID(idurl=newpacket.OwnerID),
        supplier_id=global_id.MakeGlobalID(idurl=my_id.getGlobalID()),
    )
    if queue_id not in queue():
        # such queue is not found locally, that means the message is
        # probably addressed to this node and needs to be consumed directly
        if _Debug:
            lg.warn(
                'received event was not delivered to any queue, consume now and send an Ack'
            )
        _consume_locally()
        return True
    # found a queue for that message, pushing there
    # TODO: add verification of producer's identity and signature
    if _Debug:
        lg.warn('pushing event to the queue %s on behalf of producer %s' %
                (queue_id, producer_id))
    try:
        push_message(
            producer_id=producer_id,
            queue_id=queue_id,
            data=payload,
            creation_time=created,
        )
    except Exception as exc:
        lg.exc()
        p2p_service.SendFail(newpacket, str(exc))
        return True
    p2p_service.SendAck(newpacket)
    return True
def request(self, json_payload, newpacket, info): # TODO: work in progress from main import events from p2p import p2p_service events.send('key-registry-request', dict(idurl=newpacket.OwnerID)) return p2p_service.SendAck(newpacket, 'accepted')
def _on_files_received(self, newpacket, info):
    """
    Handle an incoming Files() packet shared by another user.

    Validates that the packet ID names another user whose private key is
    registered locally, unserializes the encrypted block, imports the file
    index into the local catalog via ``backup_fs.Unserialize()`` and Acks
    the sender.

    Returns True when files were processed (Ack sent), False on any
    validation or decoding failure (a Fail packet is sent where meaningful).
    """
    import json
    from logs import lg
    from p2p import p2p_service
    from storage import backup_fs
    from storage import backup_control
    from crypt import encrypted
    from crypt import my_keys
    from userid import my_id
    from userid import global_id
    try:
        user_id = newpacket.PacketID.strip().split(':')[0]
        if user_id == my_id.getGlobalID():
            # skip my own Files() packets which comes from my suppliers
            # only process list Files() from other users who granted me access
            return False
        key_id = user_id
        if not my_keys.is_valid_key_id(key_id):
            # ignore, invalid key id in packet id
            return False
        if not my_keys.is_key_private(key_id):
            raise Exception('private key is not registered')
    except Exception as exc:
        lg.warn(str(exc))
        p2p_service.SendFail(newpacket, str(exc))
        return False
    block = encrypted.Unserialize(newpacket.Payload)
    if block is None:
        lg.warn('failed reading data from %s' % newpacket.RemoteID)
        return False
    if block.CreatorID != global_id.GlobalUserToIDURL(user_id):
        lg.warn(
            'invalid packet, creator ID must be present in packet ID : %s ~ %s' %
            (
                block.CreatorID,
                user_id,
            ))
        return False
    try:
        # FIX: dropped encoding='utf-8' - that keyword was removed from
        # json.loads() in Python 3.9 and raised TypeError there;
        # json.loads() decodes UTF-8 bytes input by itself
        json_data = json.loads(block.Data())
        # schema check: raises KeyError early if 'items' is missing
        json_data['items']
        customer_idurl = block.CreatorID
        count = backup_fs.Unserialize(
            raw_data=json_data,
            iter=backup_fs.fs(customer_idurl),
            iterID=backup_fs.fsID(customer_idurl),
            from_json=True,
        )
    except Exception as exc:
        lg.exc()
        p2p_service.SendFail(newpacket, str(exc))
        return False
    p2p_service.SendAck(newpacket)
    if count == 0:
        lg.warn('no files were imported during file sharing')
    else:
        backup_control.Save()
        lg.info('imported %d shared files from %s, key_id=%s' % (
            count,
            customer_idurl,
            key_id,
        ))
    return True
    # NOTE(review): the two lines below used to sit here live but were
    # unreachable (after the return above) and referenced the undefined name
    # "this_share"; commented out together with the rest of the draft code.
    # from access import shared_access_coordinator
    # this_share = shared_access_coordinator.get_active_share(key_id)
    # if not this_share:
    #     lg.warn('share is not opened: %s' % key_id)
    #     p2p_service.SendFail(newpacket, 'share is not opened')
    #     return False
    # this_share.automat('customer-list-files-received', (newpacket, info, block, ))
    # return True
def on_data(newpacket):
    """
    Store an incoming customer Data() packet on this supplier node.

    Verifies packet ownership, resolves a local filename from the global
    packet ID, checks the customer's donated-space quota against current
    usage, writes the serialized packet to disk, then Acks the sender with
    the payload size and schedules a local space re-test.

    Returns True when the data was stored, False otherwise.
    """
    if id_url.to_bin(newpacket.OwnerID) == my_id.getIDURL().to_bin():
        # this Data belong to us, SKIP
        return False
    # if not contactsdb.is_customer(newpacket.OwnerID):
    #     # SECURITY
    #     # TODO: process files from another customer : glob_path['idurl']
    #     lg.warn("skip, %s not a customer, packetID=%s" % (newpacket.OwnerID, newpacket.PacketID))
    #     # p2p_service.SendFail(newpacket, 'not a customer')
    #     return False
    glob_path = global_id.ParseGlobalID(newpacket.PacketID)
    if not glob_path['path']:
        # backward compatible check: retry with the "master" prefix prepended
        glob_path = global_id.ParseGlobalID(
            my_id.getGlobalID('master') + ':' + newpacket.PacketID)
    if not glob_path['path']:
        lg.err("got incorrect PacketID")
        # p2p_service.SendFail(newpacket, 'incorrect path')
        return False
    # reject packets whose signature/ownership does not check out
    authorized_idurl = verify_packet_ownership(newpacket)
    if authorized_idurl is None:
        lg.err("ownership verification failed for %r" % newpacket)
        # p2p_service.SendFail(newpacket, 'ownership verification failed')
        return False
    filename = make_valid_filename(newpacket.OwnerID, glob_path)
    if not filename:
        lg.warn("got empty filename, bad customer or wrong packetID?")
        # p2p_service.SendFail(newpacket, 'empty filename')
        return False
    dirname = os.path.dirname(filename)
    if not os.path.exists(dirname):
        try:
            bpio._dirs_make(dirname)
        except:
            lg.err("can not create sub dir %s" % dirname)
            p2p_service.SendFail(newpacket,
                                 'write error',
                                 remote_idurl=authorized_idurl)
            return False
    # the whole serialized packet (not only the payload) is what gets stored
    data = newpacket.Serialize()
    donated_bytes = settings.getDonatedBytes()
    accounting.check_create_customers_quotas(donated_bytes)
    space_dict, _ = accounting.read_customers_quotas()
    if newpacket.OwnerID.to_bin() not in list(space_dict.keys()):
        lg.err("customer space is broken, no info about donated space for %s" %
               newpacket.OwnerID)
        p2p_service.SendFail(
            newpacket,
            'customer space is broken, no info about donated space',
            remote_idurl=authorized_idurl)
        return False
    used_space_dict = accounting.read_customers_usage()
    if newpacket.OwnerID.to_bin() in list(used_space_dict.keys()):
        # quota check is best-effort: parsing errors are logged and ignored,
        # the write still proceeds in that case
        try:
            bytes_used_by_customer = int(
                used_space_dict[newpacket.OwnerID.to_bin()])
            bytes_donated_to_customer = int(
                space_dict[newpacket.OwnerID.to_bin()])
            if bytes_donated_to_customer - bytes_used_by_customer < len(data):
                lg.warn("no free space left for customer data: %s" %
                        newpacket.OwnerID)
                p2p_service.SendFail(newpacket,
                                     'no free space left for customer data',
                                     remote_idurl=authorized_idurl)
                return False
        except:
            lg.exc()
    if not bpio.WriteBinaryFile(filename, data):
        lg.err("can not write to %s" % str(filename))
        p2p_service.SendFail(newpacket,
                             'write error',
                             remote_idurl=authorized_idurl)
        return False
    # Here Data() packet was stored as it is on supplier node (current machine)
    del data
    # Ack carries the payload length so the sender can verify delivery size
    p2p_service.SendAck(newpacket,
                        response=strng.to_text(len(newpacket.Payload)),
                        remote_idurl=authorized_idurl)
    reactor.callLater(0, local_tester.TestSpaceTime)  # @UndefinedVariable
    # if self.publish_event_supplier_file_modified:  # TODO: must remove that actually
    #     from main import events
    #     events.send('supplier-file-modified', data=dict(
    #         action='write',
    #         glob_path=glob_path['path'],
    #         owner_id=newpacket.OwnerID,
    #     ))
    return True
def do_handle_event_packet(newpacket, e_json):
    """
    Dispatch one decoded event packet body.

    Three outcomes, all returning True:
    - the body carries full queue coordinates (queue_id, producer_id,
      message_id): consume the event locally and Ack;
    - the body names this node as producer with no queue: silently accept;
    - otherwise compute the global queue ID and either consume locally
      (no such local queue) or push into the matching queue via
      ``write_message()``, replying with Ack or Fail accordingly.
    """
    event_id = strng.to_text(e_json['event_id'])
    payload = e_json['payload']
    queue_id = strng.to_text(e_json.get('queue_id'))
    producer_id = e_json.get('producer_id')
    message_id = strng.to_text(e_json.get('message_id'))
    created = strng.to_text(e_json.get('created'))
    if _Debug:
        lg.args(_DebugLevel,
                event_id=event_id,
                queue_id=queue_id,
                producer_id=producer_id,
                message_id=message_id)
    if queue_id and producer_id and message_id:
        # this message has an ID and producer so it came from a queue and needs to be consumed
        # also needs to be attached more info coming from the queue to the event body
        if _Debug:
            lg.info('received new event %s from the queue at %s' % (
                event_id,
                queue_id,
            ))
        payload.update(
            dict(
                queue_id=queue_id,
                producer_id=producer_id,
                message_id=message_id,
                created=created,
            ))
        events.send(event_id, data=payload)
        p2p_service.SendAck(newpacket)
        return True
    if producer_id == my_id.getID() and not queue_id:
        # this message addressed to me but not to any queue exclusively
        return True
    # this message does not have nor ID nor producer so it came from another user directly
    # let's try to find a queue for that event and see if we need to publish it or not
    queue_id = global_id.MakeGlobalQueueID(
        queue_alias=event_id,
        owner_id=global_id.MakeGlobalID(idurl=newpacket.OwnerID),
        supplier_id=global_id.MakeGlobalID(idurl=my_id.getGlobalID()),
    )
    if queue_id not in queue():
        # such queue is not found locally, that means message is
        # probably addressed to that node and needs to be consumed directly
        if _Debug:
            lg.warn(
                'received event %s was not delivered to any queue, consume now and send an Ack'
                % event_id)
        # also add more info coming from the queue
        payload.update(
            dict(
                queue_id=queue_id,
                producer_id=producer_id,
                message_id=message_id,
                created=created,
            ))
        events.send(event_id, data=payload)
        p2p_service.SendAck(newpacket)
        return True
    # found a queue for that message, pushing there
    # TODO: add verification of producer's identity and signature
    if _Debug:
        lg.info('pushing event %s to the queue %s on behalf of producer %s' %
                (event_id, queue_id, producer_id))
    try:
        write_message(
            producer_id=producer_id,
            queue_id=queue_id,
            data=payload,
            creation_time=created,
        )
    except Exception as exc:
        lg.exc()
        p2p_service.SendFail(newpacket, str(exc))
        return True
    p2p_service.SendAck(newpacket)
    return True
def on_key_received(newpacket, info, status, error_message):
    """
    Register a key delivered inside an encrypted packet.

    Unserializes the encrypted block, parses the key JSON and registers the
    key locally, never overwriting an existing key with different material:
    - an existing *private* key is never replaced;
    - an existing *public* key may only be upgraded to the matching private
      key (the public key is erased first), otherwise the packet is rejected.

    Replies with Ack on success/duplicate, Fail (carrying the error text)
    on any mismatch or failure. Returns True on Ack, False on Fail.
    """
    block = encrypted.Unserialize(newpacket.Payload)
    if block is None:
        lg.out(
            2, 'key_ring.on_key_received ERROR reading data from %s' %
            newpacket.RemoteID)
        return False
    try:
        key_data = block.Data()
        key_json = json.loads(key_data)
        # acts as a schema check: raises KeyError if 'key_id' is missing;
        # the value is then recomputed by read_key_info() below
        key_id = key_json['key_id']
        key_id, key_object = my_keys.read_key_info(key_json)
        if key_object.isPublic():
            # received key is a public key
            if my_keys.is_key_registered(key_id):
                # but we already have a key with that ID
                if my_keys.is_key_private(key_id):
                    # we should not overwrite existing private key
                    raise Exception('private key already registered')
                if my_keys.get_public_key_raw(
                        key_id, 'openssh') != key_object.toString('openssh'):
                    # and we should not overwrite existing public key as well
                    raise Exception(
                        'another key already registered with that ID')
                # same public key already known: duplicate delivery, just Ack
                p2p_service.SendAck(newpacket)
                lg.warn('received existing public key: %s, skip' % key_id)
                return True
            if not my_keys.register_key(key_id, key_object):
                raise Exception('key register failed')
            else:
                lg.info('added new key %s, is_public=%s' %
                        (key_id, key_object.isPublic()))
            p2p_service.SendAck(newpacket)
            if _Debug:
                lg.info(
                    'received and stored locally a new key %s, include_private=%s'
                    % (key_id, key_json.get('include_private')))
            return True
        # received key is a private key
        if my_keys.is_key_registered(key_id):
            # check if we already have that key
            if my_keys.is_key_private(key_id):
                # we have already private key with same ID!!!
                if my_keys.get_private_key_raw(
                        key_id, 'openssh') != key_object.toString('openssh'):
                    # and this is a new private key : we should not overwrite!
                    raise Exception('private key already registered')
                # this is the same private key
                p2p_service.SendAck(newpacket)
                lg.warn('received existing private key: %s, skip' % key_id)
                return True
            # but we have a public key with same ID
            if my_keys.get_public_key_raw(
                    key_id,
                    'openssh') != key_object.public().toString('openssh'):
                # and we should not overwrite existing public key as well
                raise Exception('another key already registered with that ID')
            # incoming private key matches the stored public key:
            # replace the public half with the full private key
            lg.info('erasing public key %s' % key_id)
            my_keys.erase_key(key_id)
            if not my_keys.register_key(key_id, key_object):
                raise Exception('key register failed')
            lg.info('added new key %s, is_public=%s' %
                    (key_id, key_object.isPublic()))
            p2p_service.SendAck(newpacket)
            return True
        # no private key with given ID was registered
        if not my_keys.register_key(key_id, key_object):
            raise Exception('key register failed')
        lg.info('added new key %s, is_public=%s' %
                (key_id, key_object.isPublic()))
        p2p_service.SendAck(newpacket)
        return True
    except Exception as exc:
        lg.exc()
        p2p_service.SendFail(newpacket, str(exc))
        return False
def doSendAck(self, arg): """ Action method. """ p2p_service.SendAck(arg, wide=True)
def _do_process_request(self, *args, **kwargs):
    """
    Process one routed-connection service request on the proxy server.

    ``args[0]`` is a ``(json_payload, request, info)`` tuple. For
    RequestService(): validates the identity XML carried in the payload,
    rejects when the route table is full or the requester is one of my own
    suppliers, otherwise records/updates the route, optionally overrides
    the cached identity, hooks a disconnect callback onto the requester's
    active transport session and Acks "accepted". For CancelService():
    removes the route and replies accordingly. Any other command gets a
    Fail reply.
    """
    global _MaxRoutesNumber
    json_payload, request, info = args[0]
    user_id = request.CreatorID
    #--- commands.RequestService()
    if request.Command == commands.RequestService():
        if len(self.routes) >= _MaxRoutesNumber:
            if _Debug:
                lg.out(
                    _DebugLevel,
                    'proxy_server.doProcessRequest RequestService rejected: too many routes'
                )
                lg.out(_DebugLevel, '    %r' % self.routes)
            p2p_service.SendAck(request, 'rejected', wide=True)
        else:
            try:
                # idsrc = strng.to_bin(json_payload['identity'])
                idsrc = json_payload['identity']
                cached_id = identity.identity(xmlsrc=idsrc)
            except:
                lg.out(_DebugLevel, 'payload: [%s]' % request.Payload)
                lg.exc()
                return
            if not cached_id.Valid():
                lg.warn('incoming identity is not valid')
                return
            if not cached_id.isCorrect():
                lg.warn('incoming identity is not correct')
                return
            if user_id != cached_id.getIDURL():
                # the identity inside the payload must match the packet creator
                lg.warn(
                    'incoming identity is not belong to request packet creator'
                )
                return
            if contactsdb.is_supplier(user_id):
                if _Debug:
                    lg.out(
                        _DebugLevel,
                        'proxy_server.doProcessRequest RequestService rejected: this user is my supplier'
                    )
                p2p_service.SendAck(request, 'rejected', wide=True)
                return
            oldnew = ''
            if user_id not in list(self.routes.keys()):
                # accept new route
                oldnew = 'NEW'
                self.routes[user_id] = {}
            else:
                # accept existing routed user
                oldnew = 'OLD'
            if not self._is_my_contacts_present_in_identity(cached_id):
                if _Debug:
                    lg.out(_DebugLevel,
                           '    DO OVERRIDE identity for %s' % user_id)
                identitycache.OverrideIdentity(user_id, cached_id.serialize())
            else:
                if _Debug:
                    lg.out(
                        _DebugLevel,
                        '        SKIP OVERRIDE identity for %s' % user_id)
            # record/update the route bookkeeping and persist it
            self.routes[user_id]['time'] = time.time()
            self.routes[user_id]['identity'] = cached_id.serialize(
                as_text=True)
            self.routes[user_id]['publickey'] = strng.to_text(
                cached_id.publickey)
            self.routes[user_id][
                'contacts'] = cached_id.getContactsAsTuples(as_text=True)
            self.routes[user_id]['address'] = []
            self._write_route(user_id)
            active_user_sessions = gateway.find_active_session(
                info.proto, info.host)
            if active_user_sessions:
                user_connection_info = {
                    'id': active_user_sessions[0].id,
                    'index': active_user_sessions[0].index,
                    'proto': info.proto,
                    'host': info.host,
                    'idurl': user_id,
                }
                active_user_session_machine = automat.objects().get(
                    user_connection_info['index'], None)
                if active_user_session_machine:
                    # get notified when the routed user's session drops
                    active_user_session_machine.addStateChangedCallback(
                        lambda o, n, e, a: self.
                        _on_user_session_disconnected(user_id, o, n, e, a),
                        oldstate='CONNECTED',
                    )
                    if _Debug:
                        lg.out(
                            _DebugLevel,
                            'proxy_server.doProcessRequest connected %s routed user, set active session: %s'
                            % (oldnew.capitalize(), user_connection_info))
                else:
                    lg.err('not found session state machine: %s' %
                           user_connection_info['index'])
            else:
                if _Debug:
                    lg.out(
                        _DebugLevel,
                        'proxy_server.doProcessRequest active connection with user %s at %s:%s not yet exist'
                        % (
                            user_id,
                            info.proto,
                            info.host,
                        ))
                    lg.out(
                        _DebugLevel, '    current active sessions: %d' %
                        len(gateway.list_active_sessions(info.proto)))
            self.acks.append(
                p2p_service.SendAck(request, 'accepted', wide=True))
            if _Debug:
                lg.out(
                    _DebugLevel,
                    'proxy_server.doProcessRequest !!!!!!! ACCEPTED %s ROUTE for %s  contacts=%s'
                    % (
                        oldnew.capitalize(),
                        user_id,
                        self.routes[user_id]['contacts'],
                    ))
    #--- commands.CancelService()
    elif request.Command == commands.CancelService():
        if user_id in self.routes:
            # cancel existing route
            self._remove_route(user_id)
            self.routes.pop(user_id)
            identitycache.StopOverridingIdentity(user_id)
            p2p_service.SendAck(request, 'accepted', wide=True)
            if _Debug:
                lg.out(
                    _DebugLevel,
                    'proxy_server.doProcessRequest !!!!!!! CANCELLED ROUTE for %s'
                    % user_id)
        else:
            p2p_service.SendAck(request, 'rejected', wide=True)
            if _Debug:
                lg.out(
                    _DebugLevel,
                    'proxy_server.doProcessRequest CancelService rejected : %s is not found in routes'
                    % user_id)
                lg.out(_DebugLevel, '    %r' % self.routes)
    else:
        p2p_service.SendFail(request, 'rejected', wide=True)
def _on_data(self, newpacket):
    """
    Store an incoming customer Data() packet on this supplier node.

    Only packets from known customers are accepted. Resolves a local
    filename from the global packet ID, checks donated-space quota against
    current usage, writes the serialized packet to disk, Acks with the
    payload size, schedules a local space re-test and (optionally) emits a
    'supplier-file-modified' event.

    Returns True when the data was stored, False otherwise.
    """
    import os
    from twisted.internet import reactor  # @UnresolvedImport
    from logs import lg
    from lib import jsn
    from system import bpio
    from main import settings
    from userid import my_id
    from userid import global_id
    from contacts import contactsdb
    from p2p import p2p_service
    from storage import accounting
    if newpacket.OwnerID == my_id.getLocalID():
        # this Data belong to us, SKIP
        return False
    if not contactsdb.is_customer(newpacket.OwnerID):
        # SECURITY
        # TODO: process files from another customer : glob_path['idurl']
        lg.warn("skip, %s not a customer, packetID=%s" %
                (newpacket.OwnerID, newpacket.PacketID))
        # p2p_service.SendFail(newpacket, 'not a customer')
        return False
    glob_path = global_id.ParseGlobalID(newpacket.PacketID)
    if not glob_path['path']:
        # backward compatible check: retry with the "master" prefix prepended
        glob_path = global_id.ParseGlobalID(
            my_id.getGlobalID('master') + ':' + newpacket.PacketID)
    if not glob_path['path']:
        lg.err("got incorrect PacketID")
        p2p_service.SendFail(newpacket, 'incorrect path')
        return False
    filename = self._do_make_valid_filename(newpacket.OwnerID, glob_path)
    if not filename:
        lg.warn("got empty filename, bad customer or wrong packetID?")
        p2p_service.SendFail(newpacket, 'empty filename')
        return False
    dirname = os.path.dirname(filename)
    if not os.path.exists(dirname):
        try:
            bpio._dirs_make(dirname)
        except:
            lg.err("can not create sub dir %s" % dirname)
            p2p_service.SendFail(newpacket, 'write error')
            return False
    # the whole serialized packet (not only the payload) is what gets stored
    data = newpacket.Serialize()
    donated_bytes = settings.getDonatedBytes()
    accounting.check_create_customers_quotas(donated_bytes)
    space_dict = accounting.read_customers_quotas()
    if newpacket.OwnerID not in list(space_dict.keys()):
        lg.err("no info about donated space for %s" % newpacket.OwnerID)
        p2p_service.SendFail(newpacket, 'no info about donated space')
        return False
    used_space_dict = accounting.read_customers_usage()
    if newpacket.OwnerID in list(used_space_dict.keys()):
        # quota check is best-effort: parsing errors are logged and ignored,
        # the write still proceeds in that case
        try:
            bytes_used_by_customer = int(
                used_space_dict[newpacket.OwnerID])
            bytes_donated_to_customer = int(space_dict[newpacket.OwnerID])
            if bytes_donated_to_customer - bytes_used_by_customer < len(
                    data):
                lg.warn("no free space for %s" % newpacket.OwnerID)
                p2p_service.SendFail(newpacket, 'no free space')
                return False
        except:
            lg.exc()
    if not bpio.WriteBinaryFile(filename, data):
        lg.err("can not write to %s" % str(filename))
        p2p_service.SendFail(newpacket, 'write error')
        return False
    # Here Data() packet was stored as it is on supplier node (current machine)
    sz = len(data)
    del data
    lg.out(self.debug_level, "service_supplier._on_data %r" % newpacket)
    lg.out(
        self.debug_level, "    from [ %s | %s ]" % (
            newpacket.OwnerID,
            newpacket.CreatorID,
        ))
    lg.out(self.debug_level, "   saved with %d bytes to %s" % (
        sz,
        filename,
    ))
    # Ack carries the payload length so the sender can verify delivery size
    p2p_service.SendAck(newpacket, str(len(newpacket.Payload)))
    from supplier import local_tester
    reactor.callLater(0, local_tester.TestSpaceTime)  # @UndefinedVariable
    if self.publish_event_supplier_file_modified:
        from main import events
        events.send('supplier-file-modified',
                    data=dict(
                        action='write',
                        glob_path=glob_path['path'],
                        owner_id=newpacket.OwnerID,
                    ))
    return True
def doProcessRequest(self, arg):
    """
    Action method.

    ``arg`` is a ``(request, info)`` tuple. For RequestService(): parses
    the identity XML out of the request payload, rejects when the route
    table is full or the identity fails validation, otherwise records the
    route, optionally overrides the cached identity, and Acks "accepted".
    For CancelService(): removes the route and replies accordingly. Any
    other command gets a Fail reply.
    """
    global _MaxRoutesNumber
    request, _ = arg
    user_id = request.CreatorID
    if request.Command == commands.RequestService():
        if len(self.routes) >= _MaxRoutesNumber:
            if _Debug:
                lg.out(
                    _DebugLevel,
                    'proxy_server.doProcessRequest RequestService rejected: too many routes'
                )
                lg.out(_DebugLevel,
                       '    %s' % pprint.pformat(self.routes))
            p2p_service.SendAck(request, 'rejected', wide=True)
        else:
            try:
                service_info = request.Payload
                # NOTE(review): lstrip() strips a *character set*, not a
                # prefix - any leading chars from 'service_proxy_server'
                # would be eaten; appears to rely on the XML starting
                # with '<'. Confirm against the payload format.
                idsrc = service_info.lstrip('service_proxy_server').strip()
                cached_id = identity.identity(xmlsrc=idsrc)
            except:
                lg.out(_DebugLevel, 'payload: [%s]' % request.Payload)
                lg.exc()
                return
            if not cached_id.isCorrect() or not cached_id.Valid():
                lg.warn('incoming identity is not valid')
                return
            oldnew = ''
            if user_id not in self.routes.keys():
                # accept new route
                oldnew = 'NEW'
                self.routes[user_id] = {}
            else:
                # accept existing router
                oldnew = 'OLD'
            if not self._is_my_contacts_present_in_identity(cached_id):
                if _Debug:
                    lg.out(_DebugLevel,
                           '    DO OVERRIDE identity for %s' % user_id)
                identitycache.OverrideIdentity(user_id, idsrc)
            else:
                if _Debug:
                    lg.out(
                        _DebugLevel,
                        '        SKIP OVERRIDE identity for %s' % user_id)
            # record/update the route bookkeeping and persist it
            self.routes[user_id]['time'] = time.time()
            self.routes[user_id]['identity'] = idsrc
            self.routes[user_id]['publickey'] = cached_id.publickey
            self.routes[user_id][
                'contacts'] = cached_id.getContactsAsTuples()
            self.routes[user_id]['address'] = []
            self._write_route(user_id)
            self.acks.append(
                p2p_service.SendAck(request,
                                    'accepted',
                                    wide=True,
                                    packetid=request.PacketID))
            if _Debug:
                lg.out(
                    _DebugLevel,
                    'proxy_server.doProcessRequest !!!!!!! ACCEPTED %s ROUTE for %s'
                    % (oldnew, user_id))
    elif request.Command == commands.CancelService():
        if user_id in self.routes:
            # cancel existing route
            self._remove_route(user_id)
            self.routes.pop(user_id)
            identitycache.StopOverridingIdentity(user_id)
            p2p_service.SendAck(request, 'accepted', wide=True)
            if _Debug:
                lg.out(
                    _DebugLevel,
                    'proxy_server.doProcessRequest !!!!!!! CANCELLED ROUTE for %s'
                    % user_id)
        else:
            p2p_service.SendAck(request, 'rejected', wide=True)
            if _Debug:
                lg.out(
                    _DebugLevel,
                    'proxy_server.doProcessRequest CancelService rejected : %s is not found in routes'
                    % user_id)
                lg.out(_DebugLevel,
                       '    %s' % pprint.pformat(self.routes))
    else:
        p2p_service.SendFail(request, 'wrong command or payload')  # , wide=True)
def request(self, json_payload, newpacket, info): # TODO: work in progress # TODO: we can add some limit for number of connections here from p2p import p2p_service return p2p_service.SendAck(newpacket, 'accepted')
def request(self, request, info): from main import events from p2p import p2p_service events.send('key-registry-request', dict(idurl=request.OwnerID)) return p2p_service.SendAck(request, 'accepted')
def cancel(self, json_payload, request, info): # TODO: work in progress from p2p import p2p_service return p2p_service.SendAck(request, 'accepted')