Example #1
    def test_encrypted_block(self):
        import os
        from crypt import key
        from crypt import encrypted
        from userid import my_id

        key.InitMyKey()
        data1 = os.urandom(1024)
        b1 = encrypted.Block(
            CreatorID=my_id.getLocalIDURL(),
            BackupID='BackupABC',
            BlockNumber=123,
            SessionKey=key.NewSessionKey(),
            SessionKeyType=key.SessionKeyType(),
            LastBlock=True,
            Data=data1,
        )
        self.assertTrue(b1.Valid())
        raw1 = b1.Serialize()

        b2 = encrypted.Unserialize(raw1)
        self.assertTrue(b2.Valid())
        raw2 = b2.Serialize()
        data2 = b2.Data()
        self.assertEqual(data1, data2)
        self.assertEqual(raw1, raw2)
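
The unit test above covers the full round trip: build an encrypted.Block, check Valid(), Serialize() it, then encrypted.Unserialize() the bytes and compare the recovered Data(). A condensed sketch of the same flow as a reusable helper (hedged: roundtrip_block() is not part of the project and assumes key.InitMyKey() was already called):

from crypt import key
from crypt import encrypted
from userid import my_id


def roundtrip_block(payload, backup_id='BackupABC', block_number=0):
    # build the block exactly as the unit test does
    block = encrypted.Block(
        CreatorID=my_id.getLocalIDURL(),
        BackupID=backup_id,
        BlockNumber=block_number,
        SessionKey=key.NewSessionKey(),
        SessionKeyType=key.SessionKeyType(),
        LastBlock=True,
        Data=payload,
    )
    assert block.Valid()
    raw = block.Serialize()
    # Unserialize() must return an equivalent, valid block
    restored = encrypted.Unserialize(raw)
    assert restored is not None and restored.Valid()
    assert restored.Data() == payload
    return restored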
Example #2
 def doRestoreBlock(self, *args, **kwargs):
     """
     Action method.
     """
     filename = args[0]
     blockbits = bpio.ReadBinaryFile(filename)
     if not blockbits:
         self.automat('block-failed')
         return
     try:
         splitindex = blockbits.index(b":")
         lengthstring = blockbits[0:splitindex]
         datalength = int(lengthstring)  # real length before raidmake/ECC
         blockdata = blockbits[
             splitindex + 1:splitindex + 1 +
             datalength]  # remove padding from raidmake/ECC
         newblock = encrypted.Unserialize(
             blockdata, decrypt_key=self.key_id)  # convert to object
     except:
         self.automat('block-failed')
         if _Debug:
             lg.exc('bad block: %r' % blockbits)
         else:
             lg.exc()
         return
     if not newblock:
         self.automat('block-failed')
         return
     self.automat('block-restored', (
         newblock,
         filename,
     ))
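
The block file read here uses a simple framing: a decimal length prefix, a ':' separator, then the serialized block followed by raidmake/ECC padding (Example #3 below is an older variant of the same method). A minimal sketch of just that parsing step (parse_block_file() is a hypothetical helper, not project code; it takes the raw file bytes and swallows errors the way the handlers above do):

from crypt import encrypted


def parse_block_file(blockbits, decrypt_key=None):
    # blockbits is the raw file content: b'<length>:<serialized block + ECC padding>'
    if not blockbits:
        return None
    try:
        splitindex = blockbits.index(b":")
        datalength = int(blockbits[0:splitindex])  # real length before raidmake/ECC
        blockdata = blockbits[splitindex + 1:splitindex + 1 + datalength]  # drop the padding
        return encrypted.Unserialize(blockdata, decrypt_key=decrypt_key)
    except Exception:
        return None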
Example #3
 def doRestoreBlock(self, arg):
     """
     Action method.
     """
     filename = arg
     blockbits = bpio.ReadBinaryFile(filename)
     if not blockbits:
         self.automat('block-failed')
         return
     splitindex = blockbits.index(":")
     lengthstring = blockbits[0:splitindex]
     try:
         datalength = int(lengthstring)  # real length before raidmake/ECC
         blockdata = blockbits[
             splitindex + 1:splitindex + 1 +
             datalength]  # remove padding from raidmake/ECC
         newblock = encrypted.Unserialize(
             blockdata, decrypt_key=self.KeyID)  # convert to object
     except:
         lg.exc()
         self.automat('block-failed')
         return
     self.automat('block-restored', (
         newblock,
         filename,
     ))
Example #4
def on_private_key_received(newpacket, info, status, error_message):
    block = encrypted.Unserialize(newpacket.Payload)
    if block is None:
        lg.out(
            2, 'key_ring.received_private_key ERROR reading data from %s' %
            newpacket.RemoteID)
        return False
    try:
        key_data = block.Data()
        key_json = json.loads(key_data)
        key_id = str(key_json['key_id'])
        # key_alias = key_json['alias']
        # key_creator = key_json['creator']
        # key_owner = key_json['owner']
        private_key_string = str(key_json['private'])
    except:
        lg.exc()
        return False
    if my_keys.is_key_registered(key_id):
        lg.warn('key "%s" already registered' % key_id)
        return True
    key_object = my_keys.register_key(key_id, private_key_string)
    if not key_object:
        return False
    p2p_service.SendAck(newpacket)
    return True
Example #5
def IncomingSupplierListFiles(newpacket, list_files_global_id):
    """
    Called when a "Files" command was received from one of my suppliers.
    This is an answer from the given supplier (after my request) containing a
    list of our files stored on his machine.
    """
    supplier_idurl = newpacket.OwnerID
    # incoming_key_id = newpacket.PacketID.strip().split(':')[0]
    customer_idurl = list_files_global_id['idurl']
    num = contactsdb.supplier_position(supplier_idurl, customer_idurl=customer_idurl)
    if num < -1:
        lg.warn('unknown supplier: %s' % supplier_idurl)
        return False
    from supplier import list_files
    from customer import list_files_orator
    target_key_id = my_keys.latest_key_id(list_files_global_id['key_id'])
    if not my_keys.is_key_private(target_key_id):
        lg.warn('key %r not registered, not possible to decrypt ListFiles() packet from %r' % (target_key_id, supplier_idurl, ))
        return False
    try:
        block = encrypted.Unserialize(newpacket.Payload, decrypt_key=target_key_id, )
        input_data = block.Data()
    except:
        lg.err('failed decrypting data from packet %r received from %r' % (newpacket, supplier_idurl))
        return False
    list_files_raw = list_files.UnpackListFiles(input_data, settings.ListFilesFormat())
    remote_files_changed, backups2remove, paths2remove, missed_backups = backup_matrix.process_raw_list_files(
        supplier_num=num,
        list_files_text_body=list_files_raw,
        customer_idurl=None,
        is_in_sync=None,
        auto_create=False,
    )
    list_files_orator.IncomingListFiles(newpacket)
    if remote_files_changed:
        backup_matrix.SaveLatestRawListFiles(supplier_idurl, list_files_raw)
    if _Debug:
        lg.args(_DebugLevel, supplier=nameurl.GetName(supplier_idurl), customer=nameurl.GetName(customer_idurl),
                backups2remove=len(backups2remove), paths2remove=len(paths2remove),
                files_changed=remote_files_changed, missed_backups=len(missed_backups), )
    if len(backups2remove) > 0:
        p2p_service.RequestDeleteListBackups(backups2remove)
        if _Debug:
            lg.out(_DebugLevel, '    also sent requests to remove %d backups' % len(backups2remove))
    if len(paths2remove) > 0:
        p2p_service.RequestDeleteListPaths(paths2remove)
        if _Debug:
            lg.out(_DebugLevel, '    also sent requests to remove %d paths' % len(paths2remove))
    if len(missed_backups) > 0:
        from storage import backup_rebuilder
        backup_rebuilder.AddBackupsToWork(missed_backups)
        backup_rebuilder.A('start')
        if _Debug:
            lg.out(_DebugLevel, '    also triggered service_rebuilding with %d missed backups' % len(missed_backups))
    del backups2remove
    del paths2remove
    del missed_backups
    return True
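
The decryption step repeated by this and several of the following handlers boils down to three things: call encrypted.Unserialize() with decrypt_key, check for None, and read block.Data(). A hedged helper capturing just that step (decrypt_payload() is illustrative only; the caller is still responsible for checking my_keys.is_key_private() first, as done above):

from logs import lg
from crypt import encrypted


def decrypt_payload(payload, key_id):
    # returns the decrypted bytes carried by an encrypted.Block, or None on any failure
    try:
        block = encrypted.Unserialize(payload, decrypt_key=key_id)
    except Exception:
        lg.exc()
        return None
    if block is None:
        return None
    try:
        return block.Data()
    except Exception:
        lg.exc()
        return None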
Example #6
def IncomingSupplierListFiles(newpacket, list_files_global_id):
    """
    Called by ``p2p.p2p_service`` when a "Files" command was received from one
    of our suppliers.

    This is an answer from the given supplier (after our request) containing a
    list of our files stored on his machine.
    """
    from p2p import p2p_service
    supplier_idurl = newpacket.OwnerID
    # incoming_key_id = newpacket.PacketID.strip().split(':')[0]
    customer_idurl = list_files_global_id['idurl']
    num = contactsdb.supplier_position(supplier_idurl, customer_idurl=customer_idurl)
    if num < -1:
        lg.warn('unknown supplier: %s' % supplier_idurl)
        return False
    from supplier import list_files
    from customer import list_files_orator
    try:
        block = encrypted.Unserialize(
            newpacket.Payload,
            decrypt_key=my_keys.make_key_id(alias='customer', creator_idurl=my_id.getLocalIDURL(), ),
        )
        input_data = block.Data()
    except:
        lg.out(2, 'backup_control.IncomingSupplierListFiles ERROR decrypting data from %s' % newpacket)
        return False
    src = list_files.UnpackListFiles(input_data, settings.ListFilesFormat())
    backups2remove, paths2remove, missed_backups = backup_matrix.ReadRawListFiles(num, src)
    list_files_orator.IncomingListFiles(newpacket)
    backup_matrix.SaveLatestRawListFiles(supplier_idurl, src)
    if _Debug:
        lg.out(_DebugLevel, 'backup_control.IncomingSupplierListFiles from [%s]: paths2remove=%d, backups2remove=%d missed_backups=%d' % (
            nameurl.GetName(supplier_idurl), len(paths2remove), len(backups2remove), len(missed_backups)))
    if len(backups2remove) > 0:
        p2p_service.RequestDeleteListBackups(backups2remove)
        if _Debug:
            lg.out(_DebugLevel, '    also sent requests to remove %d backups' % len(backups2remove))
    if len(paths2remove) > 0:
        p2p_service.RequestDeleteListPaths(paths2remove)
        if _Debug:
            lg.out(_DebugLevel, '    also sent requests to remove %d paths' % len(paths2remove))
    if len(missed_backups) > 0:
        from storage import backup_rebuilder
        backup_rebuilder.AddBackupsToWork(missed_backups)
        backup_rebuilder.A('start')
        if _Debug:
            lg.out(_DebugLevel, '    also triggered service_rebuilding with %d missed backups' % len(missed_backups))
    del backups2remove
    del paths2remove
    del missed_backups
    return True
Example #7
def on_audit_key_received(newpacket, info, status, error_message):
    """
    Callback will be executed when a remote user would like to check if I possess the given key locally.
    """
    block = encrypted.Unserialize(newpacket.Payload)
    if block is None:
        lg.out(
            2, 'key_ring.on_audit_key_received ERROR reading data from %s' %
            newpacket.RemoteID)
        return False
    try:
        raw_payload = block.Data()
        json_payload = serialization.BytesToDict(raw_payload,
                                                 keys_to_text=True,
                                                 values_to_text=True)
        key_id = json_payload['key_id']
        json_payload['audit']
        public_sample = base64.b64decode(
            json_payload['audit']['public_sample'])
        private_sample = base64.b64decode(
            json_payload['audit']['private_sample'])
    except Exception as exc:
        lg.exc()
        p2p_service.SendFail(newpacket, str(exc))
        return False
    if not my_keys.is_valid_key_id(key_id):
        p2p_service.SendFail(newpacket, 'invalid key id')
        return False
    if not my_keys.is_key_registered(key_id, include_master=True):
        p2p_service.SendFail(newpacket, 'key not registered')
        return False
    if public_sample:
        response_payload = base64.b64encode(
            my_keys.encrypt(key_id, public_sample))
        p2p_service.SendAck(newpacket, response_payload)
        if _Debug:
            lg.info('remote user %s requested audit of public key %s' %
                    (newpacket.OwnerID, key_id))
        return True
    if private_sample:
        if not my_keys.is_key_private(key_id):
            p2p_service.SendFail(newpacket, 'private key not registered')
            return False
        response_payload = base64.b64encode(
            my_keys.decrypt(key_id, private_sample))
        p2p_service.SendAck(newpacket, response_payload)
        if _Debug:
            lg.info('remote user %s requested audit of private key %s' %
                    (newpacket.OwnerID, key_id))
        return True
    p2p_service.SendFail(newpacket, 'wrong audit request')
    return False
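
For reference, the audit request payload that on_audit_key_received() expects can be reconstructed from the parsing above: a dict serialized with lib.serialization, holding 'key_id' and base64-encoded 'audit' samples. The values below are placeholders; the key id format and the sender-side serialization are assumptions, not shown in this example:

import base64

example_audit_payload = {
    'key_id': 'somekey$alice@example.com',   # hypothetical key id
    'audit': {
        # non-empty sample: ask the remote node to encrypt it with the public key
        'public_sample': base64.b64encode(b'random challenge bytes').decode(),
        # empty sample: skip the private-key part of the audit
        'private_sample': base64.b64encode(b'').decode(),
    },
}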
Example #8
 def doProcessInboxPacket(self, arg):
     """
     Action method.
     """
     newpacket, info, _, _ = arg
     block = encrypted.Unserialize(newpacket.Payload)
     if block is None:
         lg.out(
             2,
             'proxy_receiver.doProcessInboxPacket ERROR reading data from %s'
             % newpacket.CreatorID)
         return
     try:
         session_key = key.DecryptLocalPrivateKey(block.EncryptedSessionKey)
         padded_data = key.DecryptWithSessionKey(session_key,
                                                 block.EncryptedData)
         inpt = cStringIO.StringIO(padded_data[:int(block.Length)])
         data = inpt.read()
     except:
         lg.out(
             2,
             'proxy_receiver.doProcessInboxPacket ERROR reading data from %s'
             % newpacket.CreatorID)
         lg.exc()
         try:
             inpt.close()
         except:
             pass
         return
     inpt.close()
     routed_packet = signed.Unserialize(data)
     if not routed_packet:
         lg.out(
             2,
             'proxy_receiver.doProcessInboxPacket ERROR unserialize packet from %s'
             % newpacket.CreatorID)
         return
     if _Debug:
         lg.out(
             _DebugLevel, '<<<Relay-IN %s from %s://%s' % (
                 str(routed_packet),
                 info.proto,
                 info.host,
             ))
     packet_in.process(routed_packet, info)
     del block
     del data
     del padded_data
     del inpt
     del session_key
     del routed_packet
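
This proxy_receiver example (and the later variants in Examples #10, #12, #17 and #22) repeats the same manual unwrapping of a relayed packet: decrypt the session key with the local private key, decrypt EncryptedData with it, and keep only the first block.Length bytes, which trims the padding. A condensed sketch of that core (unwrap_relayed_payload() is illustrative only; the newer variants also pass session_key_type=block.SessionKeyType, omitted here):

from crypt import key
from crypt import encrypted


def unwrap_relayed_payload(payload):
    block = encrypted.Unserialize(payload)
    if block is None:
        return None
    # recover the symmetric session key using my local private key
    session_key = key.DecryptLocalPrivateKey(block.EncryptedSessionKey)
    padded_data = key.DecryptWithSessionKey(session_key, block.EncryptedData)
    # block.Length is the real payload size; anything beyond it is padding
    return padded_data[:int(block.Length)]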
Example #9
def IncomingSupplierBackupIndex(newpacket):
    """
    Called by ``p2p.p2p_service`` when a remote copy of our local index
    database (in the "Data" packet) is received from one of our suppliers.

    The index is also stored on suppliers to be able to restore it.
    """
    b = encrypted.Unserialize(newpacket.Payload)
    if b is None:
        lg.out(2, 'backup_control.IncomingSupplierBackupIndex ERROR reading data from %s' % newpacket.RemoteID)
        return
    try:
        session_key = key.DecryptLocalPrivateKey(b.EncryptedSessionKey)
        padded_data = key.DecryptWithSessionKey(session_key, b.EncryptedData)
        inpt = cStringIO.StringIO(padded_data[:int(b.Length)])
        supplier_revision = inpt.readline().rstrip('\n')
        if supplier_revision:
            supplier_revision = int(supplier_revision)
        else:
            supplier_revision = -1
        # inpt.seek(0)
    except:
        lg.out(2, 'backup_control.IncomingSupplierBackupIndex ERROR reading data from %s' % newpacket.RemoteID)
        lg.out(2, '\n' + padded_data)
        lg.exc()
        try:
            inpt.close()
        except:
            pass
        return
    if driver.is_on('service_backup_db'):
        from storage import index_synchronizer
        index_synchronizer.A('index-file-received', (newpacket, supplier_revision))
    if revision() >= supplier_revision:
        inpt.close()
        lg.out(4, 'backup_control.IncomingSupplierBackupIndex SKIP, supplier %s revision=%d, local revision=%d' % (
            newpacket.RemoteID, supplier_revision, revision(), ))
        return
    raw_data = inpt.read()
    inpt.close()
    if ReadIndex(raw_data):
        commit(supplier_revision)
        backup_fs.Scan()
        backup_fs.Calculate()
        WriteIndex()
        control.request_update()
        lg.out(4, 'backup_control.IncomingSupplierBackupIndex updated to revision %d from %s' % (
            revision(), newpacket.RemoteID))
    else:
        lg.warn('failed to read catalog index from supplier')
Example #10
 def doProcessInboxPacket(self, arg):
     """
     Action method.
     """
     newpacket, info, _, _ = arg
     block = encrypted.Unserialize(newpacket.Payload)
     if block is None:
         lg.out(2, 'proxy_receiver.doProcessInboxPacket ERROR reading data from %s' % newpacket.CreatorID)
         return
     try:
         session_key = key.DecryptLocalPrivateKey(block.EncryptedSessionKey)
         padded_data = key.DecryptWithSessionKey(session_key, block.EncryptedData)
         inpt = cStringIO.StringIO(padded_data[:int(block.Length)])
         data = inpt.read()
     except:
         lg.out(2, 'proxy_receiver.doProcessInboxPacket ERROR reading data from %s' % newpacket.CreatorID)
         lg.exc()
         try:
             inpt.close()
         except:
             pass
         return
     inpt.close()
     routed_packet = signed.Unserialize(data)
     if not routed_packet:
         lg.out(2, 'proxy_receiver.doProcessInboxPacket ERROR unserialize packet failed from %s' % newpacket.CreatorID)
         return
     if routed_packet.Command == commands.Identity():
         newidentity = identity.identity(xmlsrc=routed_packet.Payload)
         idurl = newidentity.getIDURL()
         if not identitycache.HasKey(idurl):
             lg.warn('received new identity: %s' % idurl)
         if not identitycache.UpdateAfterChecking(idurl, routed_packet.Payload):
             lg.warn("ERROR has non-Valid identity")
             return
     if not routed_packet.Valid():
         lg.out(2, 'proxy_receiver.doProcessInboxPacket ERROR invalid packet from %s' % newpacket.CreatorID)
         return
     self.traffic_in += len(data)
     if _Debug:
         lg.out(_DebugLevel, '<<<Relay-IN %s from %s://%s with %d bytes' % (
             str(routed_packet), info.proto, info.host, len(data)))
     packet_in.process(routed_packet, info)
     del block
     del data
     del padded_data
     del inpt
     del session_key
     del routed_packet
Example #11
def IncomingSupplierBackupIndex(newpacket):
    """
    Called by ``p2p.p2p_service`` when a remote copy of our local index
    database (in the "Data" packet) is received from one of our suppliers.

    The index is also stored on suppliers to be able to restore it.
    """
    b = encrypted.Unserialize(newpacket.Payload)
    if b is None:
        lg.err('failed reading data from %s' % newpacket.RemoteID)
        return None
    try:
        session_key = key.DecryptLocalPrivateKey(b.EncryptedSessionKey)
        padded_data = key.DecryptWithSessionKey(session_key, b.EncryptedData, session_key_type=b.SessionKeyType)
        inpt = StringIO(strng.to_text(padded_data[:int(b.Length)]))
        supplier_revision = inpt.readline().rstrip('\n')
        if supplier_revision:
            supplier_revision = int(supplier_revision)
        else:
            supplier_revision = -1
    except:
        lg.exc()
        try:
            inpt.close()
        except:
            pass
        return None
    if revision() > supplier_revision:
        inpt.close()
        if _Debug:
            lg.out(_DebugLevel, 'backup_control.IncomingSupplierBackupIndex SKIP, supplier %s revision=%d, local revision=%d' % (
                newpacket.RemoteID, supplier_revision, revision(), ))
        return supplier_revision
    text_data = inpt.read()
    inpt.close()
    if ReadIndex(text_data):
        commit(supplier_revision)
        backup_fs.Scan()
        backup_fs.Calculate()
        WriteIndex()
        control.request_update()
        if _Debug:
            lg.out(_DebugLevel, 'backup_control.IncomingSupplierBackupIndex updated to revision %d from %s' % (
                revision(), newpacket.RemoteID))
    else:
        lg.warn('failed to read catalog index from supplier')
    return supplier_revision
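
Both IncomingSupplierBackupIndex() versions rely on the same framing inside the decrypted data: the first text line is the supplier's revision number, the rest is the index body. A sketch of just that split (split_index_payload() is a hypothetical helper; it takes the already decrypted, already length-trimmed text):

from io import StringIO


def split_index_payload(text_data):
    inpt = StringIO(text_data)
    first_line = inpt.readline().rstrip('\n')
    # an empty first line means the supplier did not report a revision
    supplier_revision = int(first_line) if first_line else -1
    index_body = inpt.read()
    inpt.close()
    return supplier_revision, index_body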
Example #12
    def _do_process_inbox_packet(self, *args, **kwargs):
        newpacket, info, _, _ = args[0]
        block = encrypted.Unserialize(newpacket.Payload)
        if block is None:
            lg.err('reading data from %s' % newpacket.CreatorID)
            return
        try:
            session_key = key.DecryptLocalPrivateKey(block.EncryptedSessionKey)
            padded_data = key.DecryptWithSessionKey(session_key, block.EncryptedData)
            inpt = BytesIO(padded_data[:int(block.Length)])
            data = inpt.read()
        except:
            lg.err('reading data from %s' % newpacket.CreatorID)
            lg.exc()
            try:
                inpt.close()
            except:
                pass
            return
        inpt.close()
        routed_packet = signed.Unserialize(data)
        if not routed_packet:
            lg.err('unserialize packet failed from %s' % newpacket.CreatorID)
            return
        if _Debug:
            lg.out(_DebugLevel, '<<<Relay-IN %s from %s://%s with %d bytes' % (
                str(routed_packet), info.proto, info.host, len(data)))
        if routed_packet.Command == commands.Identity():
            if _Debug:
                lg.out(_DebugLevel, '    found identity in relay packet %s' % routed_packet)
            newidentity = identity.identity(xmlsrc=routed_packet.Payload)
            idurl = newidentity.getIDURL()
            if not identitycache.HasKey(idurl):
                lg.info('received new identity: %s' % idurl)
            if not identitycache.UpdateAfterChecking(idurl, routed_packet.Payload):
                lg.warn("ERROR has non-Valid identity")
                return
        if routed_packet.Command == commands.Relay() and routed_packet.PacketID.lower() == 'identity':
            if _Debug:
                lg.out(_DebugLevel, '    found routed identity in relay packet %s' % routed_packet)
            try:
                routed_identity = signed.Unserialize(routed_packet.Payload)
                newidentity = identity.identity(xmlsrc=routed_identity.Payload)
                idurl = newidentity.getIDURL()
                if not identitycache.HasKey(idurl):
                    lg.warn('received new "routed" identity: %s' % idurl)
                if not identitycache.UpdateAfterChecking(idurl, routed_identity.Payload):
                    lg.warn("ERROR has non-Valid identity")
                    return
            except:
                lg.exc()
#         if not routed_packet.Valid():
#             lg.err('invalid packet %s from %s' % (
#                 routed_packet, newpacket.CreatorID, ))
#             return
        self.traffic_in += len(data)
        packet_in.process(routed_packet, info)
        del block
        del data
        del padded_data
        del inpt
        del session_key
        del routed_packet
Example #13
    def _on_files_received(self, newpacket, info):
        import json
        from logs import lg
        from p2p import p2p_service
        from storage import backup_fs
        from storage import backup_control
        from crypt import encrypted
        from crypt import my_keys
        from userid import my_id
        from userid import global_id
        try:
            user_id = newpacket.PacketID.strip().split(':')[0]
            if user_id == my_id.getGlobalID():
            # skip my own Files() packets which come from my suppliers
                # only process list Files() from other users who granted me access
                return False
            key_id = user_id
            if not my_keys.is_valid_key_id(key_id):
                # ignore, invalid key id in packet id
                return False
            if not my_keys.is_key_private(key_id):
                raise Exception('private key is not registered')
        except Exception as exc:
            lg.warn(str(exc))
            p2p_service.SendFail(newpacket, str(exc))
            return False
        block = encrypted.Unserialize(newpacket.Payload)
        if block is None:
            lg.warn('failed reading data from %s' % newpacket.RemoteID)
            return False
        if block.CreatorID != global_id.GlobalUserToIDURL(user_id):
            lg.warn(
                'invalid packet, creator ID must be present in packet ID : %s ~ %s'
                % (
                    block.CreatorID,
                    user_id,
                ))
            return False
        try:
            json_data = json.loads(block.Data(), encoding='utf-8')
            json_data['items']
            customer_idurl = block.CreatorID
            count = backup_fs.Unserialize(
                raw_data=json_data,
                iter=backup_fs.fs(customer_idurl),
                iterID=backup_fs.fsID(customer_idurl),
                from_json=True,
            )
        except Exception as exc:
            lg.exc()
            p2p_service.SendFail(newpacket, str(exc))
            return False
        p2p_service.SendAck(newpacket)
        if count == 0:
            lg.warn('no files were imported during file sharing')
        else:
            backup_control.Save()
            lg.info('imported %d shared files from %s, key_id=%s' % (
                count,
                customer_idurl,
                key_id,
            ))
        return True

        #         from access import shared_access_coordinator
        #         this_share = shared_access_coordinator.get_active_share(key_id)
        #         if not this_share:
        #             lg.warn('share is not opened: %s' % key_id)
        #             p2p_service.SendFail(newpacket, 'share is not opened')
        #             return False
        #         this_share.automat('customer-list-files-received', (newpacket, info, block, ))
        return True
Example #14
def on_key_received(newpacket, info, status, error_message):
    """
    Callback will be executed when I receive a new key from one remote user.
    """
    block = encrypted.Unserialize(newpacket.Payload)
    if block is None:
        lg.err('failed reading key info from %s' % newpacket.RemoteID)
        return False
    try:
        key_data = block.Data()
        key_json = serialization.BytesToDict(key_data, keys_to_text=True, values_to_text=True)
        key_id = key_json['key_id']
        key_label = key_json.get('label', '')
        key_id, key_object = my_keys.read_key_info(key_json)
        if key_object.isSigned():
            if not my_keys.verify_key_info_signature(key_json):
                raise Exception('key signature verification failed')
        if key_object.isPublic():
            # received key is a public key
            if my_keys.is_key_registered(key_id):
                # but we already have a key with that ID
                if my_keys.is_key_private(key_id):
                    # we should not overwrite existing private key
                    raise Exception('private key already registered')
                if my_keys.get_public_key_raw(key_id) != key_object.toPublicString():
                    # and we should not overwrite existing public key as well
                    raise Exception('another public key already registered with that ID and it is not matching')
                p2p_service.SendAck(newpacket)
                lg.warn('received existing public key: %s, skip' % key_id)
                return True
            if not my_keys.register_key(key_id, key_object, label=key_label):
                raise Exception('key register failed')
            else:
                lg.info('added new key %s, is_public=%s' % (key_id, key_object.isPublic()))
            p2p_service.SendAck(newpacket)
            if _Debug:
                lg.info('received and stored locally a new key %s, include_private=%s' % (key_id, key_json.get('include_private')))
            return True
        # received key is a private key
        if my_keys.is_key_registered(key_id):
            # check if we already have that key
            if my_keys.is_key_private(key_id):
                # we have already private key with same ID!!!
                if my_keys.get_private_key_raw(key_id) != key_object.toPrivateString():
                    # and this is a new private key : we should not overwrite!
                    raise Exception('private key already registered and it is not matching')
                # this is the same private key
                p2p_service.SendAck(newpacket)
                lg.warn('received existing private key: %s, skip' % key_id)
                return True
            # but we have a public key with same ID already
            if my_keys.get_public_key_raw(key_id) != key_object.toPublicString():
                # and we should not overwrite existing public key as well
                raise Exception('another public key already registered with that ID and it is not matching with private key')
            lg.info('erasing public key %s' % key_id)
            my_keys.erase_key(key_id)
            if not my_keys.register_key(key_id, key_object, label=key_label):
                raise Exception('key register failed')
            lg.info('added new key %s, is_public=%s' % (key_id, key_object.isPublic()))
            p2p_service.SendAck(newpacket)
            return True
        # no private key with given ID was registered
        if not my_keys.register_key(key_id, key_object, label=key_label):
            raise Exception('key register failed')
        lg.info('added new key %s, is_public=%s' % (key_id, key_object.isPublic()))
        p2p_service.SendAck(newpacket)
        return True
    except Exception as exc:
        lg.exc()
        p2p_service.SendFail(newpacket, strng.to_text(exc))
    return False
Example #15
def on_key_received(newpacket, info, status, error_message):
    """
    Callback will be executed when I receive a new key from one remote user.
    """
    block = encrypted.Unserialize(newpacket.Payload)
    if block is None:
        lg.err('failed reading key info from %s' % newpacket.RemoteID)
        return False
    try:
        key_data = block.Data()
        key_json = serialization.BytesToDict(key_data, keys_to_text=True, values_to_text=True)
        # key_id = strng.to_text(key_json['key_id'])
        key_label = strng.to_text(key_json.get('label', ''))
        key_id, key_object = my_keys.read_key_info(key_json)
        if key_object.isSigned():
            if not my_keys.verify_key_info_signature(key_json):
                raise Exception('received key signature verification failed: %r' % key_json)
            # TODO: must also compare "signature_pubkey" with pub key of the creator of the key!
        if key_object.isPublic():
            # received key is a public key
            if my_keys.is_key_registered(key_id):
                # but we already have a key with that ID
                if my_keys.is_key_private(key_id):
                    # we should not overwrite existing private key
                    # TODO: check other scenarios
                    raise Exception('private key already registered with %r' % key_id)
                if my_keys.get_public_key_raw(key_id) != key_object.toPublicString():
                    my_keys.erase_key(key_id)
                    if not my_keys.register_key(key_id, key_object, label=key_label):
                        raise Exception('key register failed')
                    else:
                        lg.info('replaced existing key %s, is_public=%s' % (key_id, key_object.isPublic()))
                    # normally should not overwrite existing public key
                    # TODO: look more if need to add some extra checks
                    # for example need to be able to overwrite or erase remotely some keys to cleanup
                    # raise Exception('another public key already registered with %r and new key is not matching' % key_id)
                p2p_service.SendAck(newpacket)
                lg.warn('received existing public key: %s, skip' % key_id)
                return True
            if not my_keys.register_key(key_id, key_object, label=key_label):
                raise Exception('key register failed')
            else:
                lg.info('added new key %s, is_public=%s' % (key_id, key_object.isPublic()))
            p2p_service.SendAck(newpacket)
            if _Debug:
                lg.info('received and stored locally a new key %s, include_private=%s' % (key_id, key_json.get('include_private')))
            return True
        # received key is a private key
        if my_keys.is_key_registered(key_id):
            # check if we already have that key
            if my_keys.is_key_private(key_id):
                # we have already private key with same ID!!!
                if my_keys.get_private_key_raw(key_id) != key_object.toPrivateString():
                    # and this is a new private key : we should not overwrite!
                    raise Exception('private key already registered and it is not matching with received copy')
                # this is the same private key
                p2p_service.SendAck(newpacket)
                lg.warn('received again an exact copy of already existing private key: %s, skip' % key_id)
                return True
            # but we have a public key with same ID already
            # if my_keys.get_public_key_raw(key_id) != key_object.toPublicString():
            #     # and we should not overwrite existing public key as well
            #     raise Exception('another public key already registered with that ID and it is not matching with private key')
            lg.info('erasing public key %s' % key_id)
            my_keys.erase_key(key_id)
            if not my_keys.register_key(key_id, key_object, label=key_label):
                raise Exception('key register failed')
            lg.info('added new key %s, is_public=%s' % (key_id, key_object.isPublic()))
            p2p_service.SendAck(newpacket)
            return True
        # no private key with given ID was registered
        if not my_keys.register_key(key_id, key_object, label=key_label):
            raise Exception('key register failed')
        lg.info('added new key %s, is_public=%s' % (key_id, key_object.isPublic()))
        p2p_service.SendAck(newpacket)
        return True
    except Exception as exc:
        lg.exc()
        p2p_service.SendFail(newpacket, strng.to_text(exc))
    return False
Example #16
def on_key_received(newpacket, info, status, error_message):
    block = encrypted.Unserialize(newpacket.Payload)
    if block is None:
        lg.out(
            2, 'key_ring.on_key_received ERROR reading data from %s' %
            newpacket.RemoteID)
        return False
    try:
        key_data = block.Data()
        key_json = json.loads(key_data)
        key_id = key_json['key_id']
        key_id, key_object = my_keys.read_key_info(key_json)
        if key_object.isPublic():
            # received key is a public key
            if my_keys.is_key_registered(key_id):
                # but we already have a key with that ID
                if my_keys.is_key_private(key_id):
                    # we should not overwrite existing private key
                    raise Exception('private key already registered')
                if my_keys.get_public_key_raw(
                        key_id, 'openssh') != key_object.toString('openssh'):
                    # and we should not overwrite existing public key as well
                    raise Exception(
                        'another key already registered with that ID')
                p2p_service.SendAck(newpacket)
                lg.warn('received existing public key: %s, skip' % key_id)
                return True
            if not my_keys.register_key(key_id, key_object):
                raise Exception('key register failed')
            else:
                lg.info('added new key %s, is_public=%s' %
                        (key_id, key_object.isPublic()))
            p2p_service.SendAck(newpacket)
            if _Debug:
                lg.info(
                    'received and stored locally a new key %s, include_private=%s'
                    % (key_id, key_json.get('include_private')))
            return True
        # received key is a private key
        if my_keys.is_key_registered(key_id):
            # check if we already have that key
            if my_keys.is_key_private(key_id):
                # we have already private key with same ID!!!
                if my_keys.get_private_key_raw(
                        key_id, 'openssh') != key_object.toString('openssh'):
                    # and this is a new private key : we should not overwrite!
                    raise Exception('private key already registered')
                # this is the same private key
                p2p_service.SendAck(newpacket)
                lg.warn('received existing private key: %s, skip' % key_id)
                return True
            # but we have a public key with same ID
            if my_keys.get_public_key_raw(
                    key_id,
                    'openssh') != key_object.public().toString('openssh'):
                # and we should not overwrite existing public key as well
                raise Exception('another key already registered with that ID')
            lg.info('erasing public key %s' % key_id)
            my_keys.erase_key(key_id)
            if not my_keys.register_key(key_id, key_object):
                raise Exception('key register failed')
            lg.info('added new key %s, is_public=%s' %
                    (key_id, key_object.isPublic()))
            p2p_service.SendAck(newpacket)
            return True
        # no private key with given ID was registered
        if not my_keys.register_key(key_id, key_object):
            raise Exception('key register failed')
        lg.info('added new key %s, is_public=%s' %
                (key_id, key_object.isPublic()))
        p2p_service.SendAck(newpacket)
        return True
    except Exception as exc:
        lg.exc()
        p2p_service.SendFail(newpacket, str(exc))
    return False
Example #17
    def _do_process_inbox_packet(self, *args, **kwargs):
        newpacket, info, _, _ = args[0]
        block = encrypted.Unserialize(newpacket.Payload)
        if block is None:
            lg.err('reading data from %s' % newpacket.CreatorID)
            return
        try:
            session_key = key.DecryptLocalPrivateKey(block.EncryptedSessionKey)
            padded_data = key.DecryptWithSessionKey(session_key, block.EncryptedData, session_key_type=block.SessionKeyType)
            inpt = BytesIO(padded_data[:int(block.Length)])
            data = inpt.read()
        except:
            lg.err('reading data from %s' % newpacket.CreatorID)
            lg.exc()
            try:
                inpt.close()
            except:
                pass
            return
        inpt.close()

        if newpacket.Command == commands.RelayAck():
            try:
                ack_info = serialization.BytesToDict(data, keys_to_text=True, values_to_text=True)
            except:
                lg.exc()
                return
            if _Debug:
                lg.out(_DebugLevel, '<<<Relay-ACK %s:%s from %s://%s with %d bytes %s' % (
                    ack_info['command'], ack_info['packet_id'], info.proto, info.host, len(data), ack_info['error'], ))
            if _PacketLogFileEnabled:
                lg.out(0, '                \033[0;49;33mRELAY ACK %s(%s) with %d bytes from %s to %s TID:%s\033[0m' % (
                    ack_info['command'], ack_info['packet_id'], info.bytes_received,
                    global_id.UrlToGlobalID(ack_info['from']), global_id.UrlToGlobalID(ack_info['to']),
                    info.transfer_id), log_name='packet', showtime=True)
            from transport.proxy import proxy_sender
            if proxy_sender.A():
                proxy_sender.A('relay-ack', ack_info, info)
            return True

        if newpacket.Command == commands.RelayFail():
            try:
                fail_info = serialization.BytesToDict(data, keys_to_text=True, values_to_text=True)
            except:
                lg.exc()
                return
            if _Debug:
                lg.out(_DebugLevel, '<<<Relay-FAIL %s:%s from %s://%s with %d bytes %s' % (
                    fail_info['command'], fail_info['packet_id'], info.proto, info.host, len(data), fail_info['error'], ))
            if _PacketLogFileEnabled:
                lg.out(0, '                \033[0;49;33mRELAY FAIL %s(%s) with %d bytes from %s to %s TID:%s\033[0m' % (
                    fail_info['command'], fail_info['packet_id'], info.bytes_received,
                    global_id.UrlToGlobalID(fail_info['from']), global_id.UrlToGlobalID(fail_info['to']),
                    info.transfer_id), log_name='packet', showtime=True)
            from transport.proxy import proxy_sender
            if proxy_sender.A():
                proxy_sender.A('relay-failed', fail_info, info)
            return True

        routed_packet = signed.Unserialize(data)
        if not routed_packet:
            lg.err('unserialize packet failed from %s' % newpacket.CreatorID)
            return

        if _Debug:
            lg.out(_DebugLevel, '<<<Relay-IN %s from %s://%s with %d bytes' % (
                str(routed_packet), info.proto, info.host, len(data)))
        if _PacketLogFileEnabled:
            lg.out(0, '                \033[0;49;33mRELAY IN %s(%s) with %d bytes from %s to %s TID:%s\033[0m' % (
                routed_packet.Command, routed_packet.PacketID, info.bytes_received,
                global_id.UrlToGlobalID(info.sender_idurl), global_id.UrlToGlobalID(routed_packet.RemoteID),
                info.transfer_id), log_name='packet', showtime=True)

        if routed_packet.Command == commands.Identity():
            if _Debug:
                lg.out(_DebugLevel, '    found identity in relay packet %s' % routed_packet)
            newidentity = identity.identity(xmlsrc=routed_packet.Payload)
            idurl = newidentity.getIDURL()
            if not identitycache.HasKey(idurl):
                lg.info('received new identity %s rev %r' % (idurl.original(), newidentity.getRevisionValue(), ))
            if not identitycache.UpdateAfterChecking(idurl, routed_packet.Payload):
                lg.warn("ERROR has non-Valid identity")
                return

        if routed_packet.Command in [commands.Relay(), commands.RelayIn(), ] and routed_packet.PacketID.lower().startswith('identity:'):
            if _Debug:
                lg.out(_DebugLevel, '    found routed identity in relay packet %s' % routed_packet)
            try:
                routed_identity = signed.Unserialize(routed_packet.Payload)
                newidentity = identity.identity(xmlsrc=routed_identity.Payload)
                idurl = newidentity.getIDURL()
                if not identitycache.HasKey(idurl):
                    lg.warn('received new "routed" identity: %s' % idurl)
                if not identitycache.UpdateAfterChecking(idurl, routed_identity.Payload):
                    lg.warn("ERROR has non-Valid identity")
                    return
            except:
                lg.exc()

        if newpacket.Command == commands.RelayIn() and routed_packet.Command == commands.Fail():
            if routed_packet.Payload == b'route not exist' or routed_packet.Payload == b'route already closed':
                for pout in packet_out.search_by_packet_id(routed_packet.PacketID):
                    lg.warn('received %r from %r, outgoing packet is failed: %r' % (routed_packet.Payload, newpacket.CreatorID, pout, ))
                    pout.automat('request-failed')
                return

        self.traffic_in += len(data)
        packet_in.process(routed_packet, info)
        del block
        del data
        del padded_data
        del inpt
        del session_key
        del routed_packet
Example #18
def on_files_received(newpacket, info):
    list_files_global_id = global_id.ParseGlobalID(newpacket.PacketID)
    if not list_files_global_id['idurl']:
        lg.warn('invalid PacketID: %s' % newpacket.PacketID)
        return False
    trusted_customer_idurl = list_files_global_id['idurl']
    incoming_key_id = list_files_global_id['key_id']
    if trusted_customer_idurl == my_id.getLocalID():
        if _Debug:
            lg.dbg(_DebugLevel, 'ignore %s packet which seems to come from my own supplier' % newpacket)
        # only process list Files() from other customers who granted me access to their files
        return False
    if not my_keys.is_valid_key_id(incoming_key_id):
        lg.warn('ignore, invalid key id in packet %s' % newpacket)
        return False
    if not my_keys.is_key_private(incoming_key_id):
        lg.warn('private key is not registered : %s' % incoming_key_id)
        p2p_service.SendFail(newpacket, 'private key is not registered')
        return False
    try:
        block = encrypted.Unserialize(
            newpacket.Payload,
            decrypt_key=incoming_key_id,
        )
    except:
        lg.exc(newpacket.Payload)
        return False
    if block is None:
        lg.warn('failed reading data from %s' % newpacket.RemoteID)
        return False
#         if block.CreatorID != trusted_customer_idurl:
#             lg.warn('invalid packet, creator ID must be present in packet ID : %s ~ %s' % (
#                 block.CreatorID, list_files_global_id['idurl'], ))
#             return False
    try:
        raw_files = block.Data()
    except:
        lg.exc()
        return False
    if block.CreatorID == trusted_customer_idurl:
        # this is a trusted guy sending some shared files to me
        try:
            json_data = serialization.BytesToDict(raw_files, keys_to_text=True, encoding='utf-8')
            json_data['items']
        except:
            lg.exc()
            return False
        count = backup_fs.Unserialize(
            raw_data=json_data,
            iter=backup_fs.fs(trusted_customer_idurl),
            iterID=backup_fs.fsID(trusted_customer_idurl),
            from_json=True,
        )
        p2p_service.SendAck(newpacket)
        if count == 0:
            lg.warn('no files were imported during file sharing')
        else:
            backup_control.Save()
            lg.info('imported %d shared files from %s, key_id=%s' % (
                count, trusted_customer_idurl, incoming_key_id, ))
        events.send('shared-list-files-received', dict(
            customer_idurl=trusted_customer_idurl,
            new_items=count,
        ))
        return True
    # otherwise this must be an external supplier sending us the files he stores for the trusted customer
    external_supplier_idurl = block.CreatorID
    try:
        supplier_raw_list_files = list_files.UnpackListFiles(raw_files, settings.ListFilesFormat())
    except:
        lg.exc()
        return False
    # need to detect supplier position from the list of packets
    # and place that supplier on the correct position in contactsdb
    supplier_pos = backup_matrix.DetectSupplierPosition(supplier_raw_list_files)
    known_supplier_pos = contactsdb.supplier_position(external_supplier_idurl, trusted_customer_idurl)
    if _Debug:
        lg.args(_DebugLevel, supplier_pos=supplier_pos, known_supplier_pos=known_supplier_pos, external_supplier=external_supplier_idurl,
                trusted_customer=trusted_customer_idurl, key_id=incoming_key_id)
    if supplier_pos >= 0:
        if known_supplier_pos >= 0 and known_supplier_pos != supplier_pos:
            lg.err('known external supplier %r position %d is not matching to received list files position %d for customer %s' % (
                external_supplier_idurl, known_supplier_pos,  supplier_pos, trusted_customer_idurl))
        # TODO: we should remove that below because we do not need it
        #     service_customer_family() should take care of suppliers list for trusted customer
        #     so we need to just read that list from DHT
        #     contactsdb.erase_supplier(
        #         idurl=external_supplier_idurl,
        #         customer_idurl=trusted_customer_idurl,
        #     )
        # contactsdb.add_supplier(
        #     idurl=external_supplier_idurl,
        #     position=supplier_pos,
        #     customer_idurl=trusted_customer_idurl,
        # )
        # contactsdb.save_suppliers(customer_idurl=trusted_customer_idurl)
    else:
        lg.warn('not possible to detect external supplier position for customer %s from received list files, known position is %s' % (
            trusted_customer_idurl, known_supplier_pos))
        supplier_pos = known_supplier_pos
    remote_files_changed, _, _, _ = backup_matrix.process_raw_list_files(
        supplier_num=supplier_pos,
        list_files_text_body=supplier_raw_list_files,
        customer_idurl=trusted_customer_idurl,
        is_in_sync=True,
        auto_create=True,
    )
    if remote_files_changed:
        backup_matrix.SaveLatestRawListFiles(
            supplier_idurl=external_supplier_idurl,
            raw_data=supplier_raw_list_files,
            customer_idurl=trusted_customer_idurl,
        )
    # finally sending Ack() packet back
    p2p_service.SendAck(newpacket)
    if remote_files_changed:
        lg.info('received updated list of files from external supplier %s for customer %s' % (external_supplier_idurl, trusted_customer_idurl))
    return True
Example #19
 def _do_forward_outbox_packet(self, outpacket_info_tuple):
     """
      This packet is addressed to me but contains routed data to be transferred to another node.
      I will decrypt it with my private key and forward it further to the outside world.
     """
     newpacket, info = outpacket_info_tuple
     block = encrypted.Unserialize(newpacket.Payload)
     if block is None:
         lg.out(
             2,
             'proxy_router.doForwardOutboxPacket ERROR reading data from %s'
             % newpacket.RemoteID)
         return
     try:
         session_key = key.DecryptLocalPrivateKey(block.EncryptedSessionKey)
         padded_data = key.DecryptWithSessionKey(session_key,
                                                 block.EncryptedData)
         inpt = BytesIO(padded_data[:int(block.Length)])
         # see proxy_sender.ProxySender : _on_first_outbox_packet() for sending part
         json_payload = serialization.BytesToDict(inpt.read(),
                                                  keys_to_text=True)
         inpt.close()
         sender_idurl = json_payload['f']  # from
         receiver_idurl = json_payload['t']  # to
         wide = json_payload['w']  # wide
         routed_data = json_payload['p']  # payload
     except:
         lg.out(
             2,
             'proxy_router.doForwardOutboxPacket ERROR reading data from %s'
             % newpacket.RemoteID)
         lg.exc()
         try:
             inpt.close()
         except:
             pass
         return
     route = self.routes.get(sender_idurl, None)
     if not route:
         inpt.close()
         lg.warn('route with %s not found' % (sender_idurl))
         p2p_service.SendFail(newpacket,
                              'route not exist',
                              remote_idurl=sender_idurl)
         return
     routed_packet = signed.Unserialize(routed_data)
     if not routed_packet or not routed_packet.Valid():
         lg.out(
             2,
             'proxy_router.doForwardOutboxPacket ERROR unserialize packet from %s'
             % newpacket.RemoteID)
         return
     # send the packet directly to target user_id
     # we pass not callbacks because all response packets from this call will be also re-routed
     pout = packet_out.create(
         routed_packet,
         wide=wide,
         callbacks={},
         target=receiver_idurl,
     )
     if _Debug:
         lg.out(
             _DebugLevel,
             '>>>Relay-IN-OUT %d bytes from %s at %s://%s :' % (
                 len(routed_data),
                 nameurl.GetName(sender_idurl),
                 info.proto,
                 info.host,
             ))
         lg.out(
             _DebugLevel, '    routed to %s : %s' %
             (nameurl.GetName(receiver_idurl), pout))
     del block
     del routed_data
     del padded_data
     del route
     del inpt
     del session_key
     del routed_packet
Example #20
    def _on_files_received(self, newpacket, info):
        from logs import lg
        from lib import serialization
        from main import settings
        from main import events
        from p2p import p2p_service
        from storage import backup_fs
        from storage import backup_control
        from crypt import encrypted
        from crypt import my_keys
        from userid import my_id
        from userid import global_id
        from storage import backup_matrix
        from supplier import list_files
        from contacts import contactsdb
        list_files_global_id = global_id.ParseGlobalID(newpacket.PacketID)
        if not list_files_global_id['idurl']:
            lg.warn('invalid PacketID: %s' % newpacket.PacketID)
            return False
        trusted_customer_idurl = list_files_global_id['idurl']
        incoming_key_id = list_files_global_id['key_id']
        if trusted_customer_idurl == my_id.getGlobalID():
            lg.warn('skip %s packet which seems to come from my own supplier' %
                    newpacket)
            # only process list Files() from other users who granted me access
            return False
        if not my_keys.is_valid_key_id(incoming_key_id):
            lg.warn('ignore, invalid key id in packet %s' % newpacket)
            return False
        if not my_keys.is_key_private(incoming_key_id):
            lg.warn('private key is not registered : %s' % incoming_key_id)
            p2p_service.SendFail(newpacket, 'private key is not registered')
            return False
        try:
            block = encrypted.Unserialize(
                newpacket.Payload,
                decrypt_key=incoming_key_id,
            )
        except:
            lg.exc(newpacket.Payload)
            return False
        if block is None:
            lg.warn('failed reading data from %s' % newpacket.RemoteID)
            return False
#         if block.CreatorID != trusted_customer_idurl:
#             lg.warn('invalid packet, creator ID must be present in packet ID : %s ~ %s' % (
#                 block.CreatorID, list_files_global_id['idurl'], ))
#             return False
        try:
            raw_files = block.Data()
        except:
            lg.exc()
            return False
        if block.CreatorID == trusted_customer_idurl:
            # this is a trusted guy sending some shared files to me
            try:
                json_data = serialization.BytesToDict(raw_files,
                                                      keys_to_text=True)
                json_data['items']
            except:
                lg.exc()
                return False
            count = backup_fs.Unserialize(
                raw_data=json_data,
                iter=backup_fs.fs(trusted_customer_idurl),
                iterID=backup_fs.fsID(trusted_customer_idurl),
                from_json=True,
            )
            p2p_service.SendAck(newpacket)
            events.send(
                'shared-list-files-received',
                dict(
                    customer_idurl=trusted_customer_idurl,
                    new_items=count,
                ))
            if count == 0:
                lg.warn('no files were imported during file sharing')
            else:
                backup_control.Save()
                lg.info('imported %d shared files from %s, key_id=%s' % (
                    count,
                    trusted_customer_idurl,
                    incoming_key_id,
                ))
            return True
        # otherwise this must be an external supplier sending us the files he stores for the trusted customer
        external_supplier_idurl = block.CreatorID
        try:
            supplier_raw_list_files = list_files.UnpackListFiles(
                raw_files, settings.ListFilesFormat())
            backup_matrix.SaveLatestRawListFiles(
                supplier_idurl=external_supplier_idurl,
                raw_data=supplier_raw_list_files,
                customer_idurl=trusted_customer_idurl,
            )
        except:
            lg.exc()
            return False
        # need to detect supplier position from the list of packets
        # and place that supplier on the correct position in contactsdb
        real_supplier_pos = backup_matrix.DetectSupplierPosition(
            supplier_raw_list_files)
        known_supplier_pos = contactsdb.supplier_position(
            external_supplier_idurl, trusted_customer_idurl)
        if real_supplier_pos >= 0:
            if known_supplier_pos >= 0 and known_supplier_pos != real_supplier_pos:
                lg.warn(
                    'external supplier %s position is not matching to list files, rewriting for customer %s'
                    % (external_supplier_idurl, trusted_customer_idurl))
                contactsdb.erase_supplier(
                    idurl=external_supplier_idurl,
                    customer_idurl=trusted_customer_idurl,
                )
            contactsdb.add_supplier(
                idurl=external_supplier_idurl,
                position=real_supplier_pos,
                customer_idurl=trusted_customer_idurl,
            )
            contactsdb.save_suppliers(customer_idurl=trusted_customer_idurl)
        else:
            lg.warn(
                'not possible to detect external supplier position for customer %s'
                % trusted_customer_idurl)
        # finally send ack packet back
        p2p_service.SendAck(newpacket)
        lg.info(
            'received list of packets from external supplier %s for customer %s'
            % (external_supplier_idurl, trusted_customer_idurl))
        return True
Example #21
 def doForwardOutboxPacket(self, arg):
     """
     Action method.
     """
     # decrypt with my key and send to outside world
     newpacket, info = arg
     block = encrypted.Unserialize(newpacket.Payload)
     if block is None:
         lg.out(
             2,
             'proxy_router.doForwardOutboxPacket ERROR reading data from %s'
             % newpacket.RemoteID)
         return
     try:
         session_key = key.DecryptLocalPrivateKey(block.EncryptedSessionKey)
         padded_data = key.DecryptWithSessionKey(session_key,
                                                 block.EncryptedData)
         inpt = cStringIO.StringIO(padded_data[:int(block.Length)])
         sender_idurl = inpt.readline().rstrip('\n')
         receiver_idurl = inpt.readline().rstrip('\n')
         wide = inpt.readline().rstrip('\n')
         wide = wide == 'wide'
     except:
         lg.out(
             2,
             'proxy_router.doForwardOutboxPacket ERROR reading data from %s'
             % newpacket.RemoteID)
         lg.exc()
         try:
             inpt.close()
         except:
             pass
         return
     route = self.routes.get(sender_idurl, None)
     if not route:
         inpt.close()
         lg.warn('route with %s not found' % (sender_idurl))
         p2p_service.SendFail(newpacket,
                              'route not exist',
                              remote_idurl=sender_idurl)
         return
     data = inpt.read()
     inpt.close()
     routed_packet = signed.Unserialize(data)
     if not routed_packet:
         lg.out(
             2,
             'proxy_router.doForwardOutboxPacket ERROR unserialize packet from %s'
             % newpacket.RemoteID)
         return
     # send the packet directly to target user_id
     # we pass not callbacks because all response packets from this call will be also re-routed
     pout = packet_out.create(
         routed_packet,
         wide=wide,
         callbacks={},
         target=receiver_idurl,
     )
     # gateway.outbox(routed_packet, wide=wide)
     if _Debug:
         lg.out(
             _DebugLevel, '>>>Relay-OUT %d bytes from %s at %s://%s :' % (
                 len(data),
                 nameurl.GetName(sender_idurl),
                 info.proto,
                 info.host,
             ))
         lg.out(
             _DebugLevel, '    routed to %s : %s' %
             (nameurl.GetName(receiver_idurl), pout))
     del block
     del data
     del padded_data
     del route
     del inpt
     del session_key
     del routed_packet
Example #22
    def _do_process_inbox_packet(self, *args, **kwargs):
        newpacket, info, _, _ = args[0]
        block = encrypted.Unserialize(newpacket.Payload)
        if block is None:
            lg.err('reading data from %s' % newpacket.CreatorID)
            return
        try:
            session_key = key.DecryptLocalPrivateKey(block.EncryptedSessionKey)
            padded_data = key.DecryptWithSessionKey(
                session_key,
                block.EncryptedData,
                session_key_type=block.SessionKeyType)
            inpt = BytesIO(padded_data[:int(block.Length)])
            data = inpt.read()
        except:
            lg.err('reading data from %s' % newpacket.CreatorID)
            lg.exc()
            try:
                inpt.close()
            except:
                pass
            return
        inpt.close()
        routed_packet = signed.Unserialize(data)
        if not routed_packet:
            lg.err('unserialize packet failed from %s' % newpacket.CreatorID)
            return
        if _Debug:
            lg.out(
                _DebugLevel, '<<<Relay-IN %s from %s://%s with %d bytes' %
                (str(routed_packet), info.proto, info.host, len(data)))
        if _PacketLogFileEnabled:
            lg.out(
                0,
                '                \033[0;49;33mRELAY IN %s(%s) with %d bytes from %s to %s TID:%s\033[0m'
                % (routed_packet.Command, routed_packet.PacketID,
                   info.bytes_received,
                   global_id.UrlToGlobalID(info.sender_idurl),
                   global_id.UrlToGlobalID(
                       routed_packet.RemoteID), info.transfer_id),
                log_name='packet',
                showtime=True)
        if routed_packet.Command == commands.Identity():
            if _Debug:
                lg.out(_DebugLevel,
                       '    found identity in relay packet %s' % routed_packet)
            newidentity = identity.identity(xmlsrc=routed_packet.Payload)
            idurl = newidentity.getIDURL()
            if not identitycache.HasKey(idurl):
                lg.info('received new identity %s rev %r' % (
                    idurl.original(),
                    newidentity.getRevisionValue(),
                ))
            if not identitycache.UpdateAfterChecking(idurl,
                                                     routed_packet.Payload):
                lg.warn("ERROR has non-Valid identity")
                return
        if routed_packet.Command == commands.Relay(
        ) and routed_packet.PacketID.lower().startswith('identity:'):
            if _Debug:
                lg.out(
                    _DebugLevel,
                    '    found routed identity in relay packet %s' %
                    routed_packet)
            try:
                routed_identity = signed.Unserialize(routed_packet.Payload)
                newidentity = identity.identity(xmlsrc=routed_identity.Payload)
                idurl = newidentity.getIDURL()
                if not identitycache.HasKey(idurl):
                    lg.warn('received new "routed" identity: %s' % idurl)
                if not identitycache.UpdateAfterChecking(
                        idurl, routed_identity.Payload):
                    lg.warn("ERROR has non-Valid identity")
                    return
            except:
                lg.exc()
#         if not routed_packet.Valid():
#             lg.err('invalid packet %s from %s' % (
#                 routed_packet, newpacket.CreatorID, ))
#             return
        self.traffic_in += len(data)
        packet_in.process(routed_packet, info)
        del block
        del data
        del padded_data
        del inpt
        del session_key
        del routed_packet