Example #1
def ReadIndex(text_data, encoding='utf-8'):
    """
    Read the index database: ``text_data`` is a string that holds the
    serialized catalog data.

    This is a simple text format, see the ``p2p.backup_fs.Serialize()``
    method. The first line keeps the revision number.
    """
    global _LoadingFlag
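    # re-entrancy guard: refuse to start a new load while a previous ReadIndex() call is still running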
    if _LoadingFlag:
        return False
    _LoadingFlag = True
    backup_fs.Clear()
    count = 0
    try:
        json_data = jsn.loads(
            text_data,
            encoding=encoding,
        )
    except:
        lg.exc()
        json_data = text_data
    if _Debug:
        lg.args(_DebugLevel, json_data=json_data)
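    # top-level keys are either the literal 'items' section or per-customer global IDs,
    # each customer section is unserialized into that customer's own catalog iterators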
    for customer_id in json_data.keys():
        if customer_id == 'items':
            try:
                count = backup_fs.Unserialize(json_data,
                                              from_json=True,
                                              decoding=encoding)
            except:
                lg.exc()
                _LoadingFlag = False
                return False
        else:
            customer_idurl = global_id.GlobalUserToIDURL(customer_id)
            if not id_url.is_cached(customer_idurl):
                lg.warn(
                    'identity %r is not yet cached, skip reading related catalog items'
                    % customer_idurl)
                identitycache.immediatelyCaching(customer_idurl,
                                                 try_other_sources=False,
                                                 ignore_errors=True)
                continue
            try:
                count = backup_fs.Unserialize(
                    json_data[customer_id],
                    iter=backup_fs.fs(customer_idurl),
                    iterID=backup_fs.fsID(customer_idurl),
                    from_json=True,
                    decoding=encoding,
                )
            except:
                lg.exc()
                _LoadingFlag = False
                return False
    if _Debug:
        lg.out(_DebugLevel, 'backup_control.ReadIndex %d items loaded' % count)
    # local_site.update_backup_fs(backup_fs.ListAllBackupIDsSQL())
    # commit(new_revision)
    _LoadingFlag = False
    return True
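
A minimal usage sketch (not part of the original module): the helper below just reads previously serialized index text from a file and hands it to ReadIndex(). The helper name and the file handling are illustrative assumptions; ReadIndex() itself performs the JSON decoding and the per-customer dispatch shown above.

def _load_index_from_file(index_path, encoding='utf-8'):
    # read the raw serialized text, e.g. previously written out by backup_fs.Serialize()
    with open(index_path, 'r', encoding=encoding) as fin:
        text_data = fin.read()
    # ReadIndex() returns False if another load is already running or unserialization failed
    return ReadIndex(text_data, encoding=encoding)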
Example #2
def ReadIndex(raw_data):
    """
    Read the index database: ``raw_data`` is a string that holds the
    serialized catalog data.

    This is a simple text format, see the ``p2p.backup_fs.Serialize()``
    method. The first line keeps the revision number.
    """
    global _LoadingFlag
    if _LoadingFlag:
        return False
    _LoadingFlag = True
#     try:
#         new_revision = int(inpt.readline().rstrip('\n'))
#     except:
#         _LoadingFlag = False
#         lg.exc()
#         return False
    backup_fs.Clear()
    try:
        count = backup_fs.Unserialize(raw_data, from_json=True)
    except KeyError:
        lg.warn('fallback to old (non-json) index format')
        count = backup_fs.Unserialize(raw_data, from_json=False)
    except ValueError:
        lg.exc()
        _LoadingFlag = False
        return False
    if _Debug:
        lg.out(_DebugLevel, 'backup_control.ReadIndex %d items loaded' % count)
    # local_site.update_backup_fs(backup_fs.ListAllBackupIDsSQL())
    # commit(new_revision)
    _LoadingFlag = False
    return True
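
A hypothetical companion helper, based only on the docstring above and the commented-out revision handling: it peels a leading revision line off the raw file content before the catalog itself is passed to ReadIndex(). The helper name and the assumption that the on-disk file still starts with a revision line are illustrative, not taken from the project sources.

def _split_revision_line(file_text):
    # the first line is expected to hold the revision number, the rest is the serialized catalog
    first_line, _, catalog_text = file_text.partition('\n')
    try:
        revision = int(first_line.strip())
    except ValueError:
        # no usable revision line found, treat the whole text as catalog data
        revision, catalog_text = -1, file_text
    return revision, catalog_text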
Example #3
def ReadIndex(text_data, encoding='utf-8'):
    """
    Read the index database: ``text_data`` is a string that holds the
    serialized catalog data.

    This is a simple text format, see the ``p2p.backup_fs.Serialize()``
    method. The first line keeps the revision number.
    """
    global _LoadingFlag
    if _LoadingFlag:
        return False
    _LoadingFlag = True
    backup_fs.Clear()
    count = 0
    try:
        json_data = jsn.loads(
            text_data,
            encoding=encoding,
        )
    except:
        lg.exc()
        json_data = text_data
    if _Debug:
        import pprint
        lg.out(_DebugLevel, pprint.pformat(json_data))
    for customer_id in json_data.keys():
        if customer_id == 'items':
            try:
                count = backup_fs.Unserialize(json_data,
                                              from_json=True,
                                              decoding=encoding)
            except:
                lg.exc()
                _LoadingFlag = False
                return False
        else:
            customer_idurl = global_id.GlobalUserToIDURL(customer_id)
            try:
                count = backup_fs.Unserialize(
                    json_data[customer_id],
                    iter=backup_fs.fs(customer_idurl),
                    iterID=backup_fs.fsID(customer_idurl),
                    from_json=True,
                    decoding=encoding,
                )
            except:
                lg.exc()
                _LoadingFlag = False
                return False
    if _Debug:
        lg.out(_DebugLevel, 'backup_control.ReadIndex %d items loaded' % count)
    # local_site.update_backup_fs(backup_fs.ListAllBackupIDsSQL())
    # commit(new_revision)
    _LoadingFlag = False
    return True
Example #4
    def _on_files_received(self, newpacket, info):
        import json
        from logs import lg
        from p2p import p2p_service
        from storage import backup_fs
        from storage import backup_control
        from crypt import encrypted
        from crypt import my_keys
        from userid import my_id
        from userid import global_id
        try:
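            # the part of the PacketID before the first ':' is the sender's global user ID,
            # which also doubles as the key ID below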
            user_id = newpacket.PacketID.strip().split(':')[0]
            if user_id == my_id.getGlobalID():
                # skip my own Files() packets which come from my suppliers
                # only process list Files() from other users who granted me access
                return False
            key_id = user_id
            if not my_keys.is_valid_key_id(key_id):
                # ignore, invalid key id in packet id
                return False
            if not my_keys.is_key_private(key_id):
                raise Exception('private key is not registered')
        except Exception as exc:
            lg.warn(str(exc))
            p2p_service.SendFail(newpacket, str(exc))
            return False
        block = encrypted.Unserialize(newpacket.Payload)
        if block is None:
            lg.warn('failed reading data from %s' % newpacket.RemoteID)
            return False
        if block.CreatorID != global_id.GlobalUserToIDURL(user_id):
            lg.warn(
                'invalid packet, creator ID must be present in packet ID : %s ~ %s'
                % (
                    block.CreatorID,
                    user_id,
                ))
            return False
        try:
            json_data = json.loads(block.Data())
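            # probe for the mandatory 'items' key, a KeyError here means the payload is not an index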
            json_data['items']
            customer_idurl = block.CreatorID
            count = backup_fs.Unserialize(
                raw_data=json_data,
                iter=backup_fs.fs(customer_idurl),
                iterID=backup_fs.fsID(customer_idurl),
                from_json=True,
            )
        except Exception as exc:
            lg.exc()
            p2p_service.SendFail(newpacket, str(exc))
            return False
        p2p_service.SendAck(newpacket)
        if count == 0:
            lg.warn('no files were imported during file sharing')
        else:
            backup_control.Save()
            lg.info('imported %d shared files from %s, key_id=%s' % (
                count,
                customer_idurl,
                key_id,
            ))
        return True

        #         from access import shared_access_coordinator
        #         this_share = shared_access_coordinator.get_active_share(key_id)
        #         if not this_share:
        #             lg.warn('share is not opened: %s' % key_id)
        #             p2p_service.SendFail(newpacket, 'share is not opened')
        #             return False
        #         this_share.automat('customer-list-files-received', (newpacket, info, block, ))
Example #5
    def _on_files_received(self, newpacket, info):
        from logs import lg
        from lib import serialization
        from main import settings
        from main import events
        from p2p import p2p_service
        from storage import backup_fs
        from storage import backup_control
        from crypt import encrypted
        from crypt import my_keys
        from userid import my_id
        from userid import global_id
        from storage import backup_matrix
        from supplier import list_files
        from contacts import contactsdb
        list_files_global_id = global_id.ParseGlobalID(newpacket.PacketID)
        if not list_files_global_id['idurl']:
            lg.warn('invalid PacketID: %s' % newpacket.PacketID)
            return False
        trusted_customer_idurl = list_files_global_id['idurl']
        incoming_key_id = list_files_global_id['key_id']
        if trusted_customer_idurl == my_id.getLocalID():
            lg.warn('skip %s packet which seems to have come from my own supplier' %
                    newpacket)
            # only process list Files() from other users who granted me access
            return False
        if not my_keys.is_valid_key_id(incoming_key_id):
            lg.warn('ignore, invalid key id in packet %s' % newpacket)
            return False
        if not my_keys.is_key_private(incoming_key_id):
            lg.warn('private key is not registered : %s' % incoming_key_id)
            p2p_service.SendFail(newpacket, 'private key is not registered')
            return False
        try:
            block = encrypted.Unserialize(
                newpacket.Payload,
                decrypt_key=incoming_key_id,
            )
        except:
            lg.exc(newpacket.Payload)
            return False
        if block is None:
            lg.warn('failed reading data from %s' % newpacket.RemoteID)
            return False
#         if block.CreatorID != trusted_customer_idurl:
#             lg.warn('invalid packet, creator ID must be present in packet ID : %s ~ %s' % (
#                 block.CreatorID, list_files_global_id['idurl'], ))
#             return False
        try:
            raw_files = block.Data()
        except:
            lg.exc()
            return False
        if block.CreatorID == trusted_customer_idurl:
            # this is a trusted guy sending some shared files to me
            try:
                json_data = serialization.BytesToDict(raw_files,
                                                      keys_to_text=True)
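                # probe for the mandatory 'items' key to make sure this really is an index payload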
                json_data['items']
            except:
                lg.exc()
                return False
            count = backup_fs.Unserialize(
                raw_data=json_data,
                iter=backup_fs.fs(trusted_customer_idurl),
                iterID=backup_fs.fsID(trusted_customer_idurl),
                from_json=True,
            )
            p2p_service.SendAck(newpacket)
            events.send(
                'shared-list-files-received',
                dict(
                    customer_idurl=trusted_customer_idurl,
                    new_items=count,
                ))
            if count == 0:
                lg.warn('no files were imported during file sharing')
            else:
                backup_control.Save()
                lg.info('imported %d shared files from %s, key_id=%s' % (
                    count,
                    trusted_customer_idurl,
                    incoming_key_id,
                ))
            return True
        # otherwise this must be an external supplier sending us the files he stores for the trusted customer
        external_supplier_idurl = block.CreatorID
        try:
            supplier_raw_list_files = list_files.UnpackListFiles(
                raw_files, settings.ListFilesFormat())
            backup_matrix.SaveLatestRawListFiles(
                supplier_idurl=external_supplier_idurl,
                raw_data=supplier_raw_list_files,
                customer_idurl=trusted_customer_idurl,
            )
        except:
            lg.exc()
            return False
        # need to detect supplier position from the list of packets
        # and place that supplier on the correct position in contactsdb
        real_supplier_pos = backup_matrix.DetectSupplierPosition(
            supplier_raw_list_files)
        known_supplier_pos = contactsdb.supplier_position(
            external_supplier_idurl, trusted_customer_idurl)
        if real_supplier_pos >= 0:
            if known_supplier_pos >= 0 and known_supplier_pos != real_supplier_pos:
                lg.warn(
                    'external supplier %s position is not matching to list files, rewriting for customer %s'
                    % (external_supplier_idurl, trusted_customer_idurl))
                contactsdb.erase_supplier(
                    idurl=external_supplier_idurl,
                    customer_idurl=trusted_customer_idurl,
                )
            contactsdb.add_supplier(
                idurl=external_supplier_idurl,
                position=real_supplier_pos,
                customer_idurl=trusted_customer_idurl,
            )
            contactsdb.save_suppliers(customer_idurl=trusted_customer_idurl)
        else:
            lg.warn(
                'not possible to detect external supplier position for customer %s'
                % trusted_customer_idurl)
        # finally send ack packet back
        p2p_service.SendAck(newpacket)
        lg.info(
            'received list of packets from external supplier %s for customer %s'
            % (external_supplier_idurl, trusted_customer_idurl))
        return True
Example #6
def on_files_received(newpacket, info):
    list_files_global_id = global_id.ParseGlobalID(newpacket.PacketID)
    if not list_files_global_id['idurl']:
        lg.warn('invalid PacketID: %s' % newpacket.PacketID)
        return False
    trusted_customer_idurl = list_files_global_id['idurl']
    incoming_key_id = list_files_global_id['key_id']
    if trusted_customer_idurl == my_id.getLocalID():
        if _Debug:
            lg.dbg(_DebugLevel, 'ignore %s packet which seems to have come from my own supplier' % newpacket)
        # only process list Files() from other customers who granted me access to their files
        return False
    if not my_keys.is_valid_key_id(incoming_key_id):
        lg.warn('ignore, invalid key id in packet %s' % newpacket)
        return False
    if not my_keys.is_key_private(incoming_key_id):
        lg.warn('private key is not registered : %s' % incoming_key_id)
        p2p_service.SendFail(newpacket, 'private key is not registered')
        return False
    try:
        block = encrypted.Unserialize(
            newpacket.Payload,
            decrypt_key=incoming_key_id,
        )
    except:
        lg.exc(newpacket.Payload)
        return False
    if block is None:
        lg.warn('failed reading data from %s' % newpacket.RemoteID)
        return False
#         if block.CreatorID != trusted_customer_idurl:
#             lg.warn('invalid packet, creator ID must be present in packet ID : %s ~ %s' % (
#                 block.CreatorID, list_files_global_id['idurl'], ))
#             return False
    try:
        raw_files = block.Data()
    except:
        lg.exc()
        return False
    if block.CreatorID == trusted_customer_idurl:
        # this is a trusted guy sending some shared files to me
        try:
            json_data = serialization.BytesToDict(raw_files, keys_to_text=True, encoding='utf-8')
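            # probe for the mandatory 'items' key, a missing key means this is not an index payload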
            json_data['items']
        except:
            lg.exc()
            return False
        count = backup_fs.Unserialize(
            raw_data=json_data,
            iter=backup_fs.fs(trusted_customer_idurl),
            iterID=backup_fs.fsID(trusted_customer_idurl),
            from_json=True,
        )
        p2p_service.SendAck(newpacket)
        if count == 0:
            lg.warn('no files were imported during file sharing')
        else:
            backup_control.Save()
            lg.info('imported %d shared files from %s, key_id=%s' % (
                count, trusted_customer_idurl, incoming_key_id, ))
        events.send('shared-list-files-received', dict(
            customer_idurl=trusted_customer_idurl,
            new_items=count,
        ))
        return True
    # otherwise this must be an external supplier sending us the files he stores for the trusted customer
    external_supplier_idurl = block.CreatorID
    try:
        supplier_raw_list_files = list_files.UnpackListFiles(raw_files, settings.ListFilesFormat())
    except:
        lg.exc()
        return False
    # need to detect supplier position from the list of packets
    # and place that supplier on the correct position in contactsdb
    supplier_pos = backup_matrix.DetectSupplierPosition(supplier_raw_list_files)
    known_supplier_pos = contactsdb.supplier_position(external_supplier_idurl, trusted_customer_idurl)
    if _Debug:
        lg.args(_DebugLevel, supplier_pos=supplier_pos, known_supplier_pos=known_supplier_pos, external_supplier=external_supplier_idurl,
                trusted_customer=trusted_customer_idurl, key_id=incoming_key_id)
    if supplier_pos >= 0:
        if known_supplier_pos >= 0 and known_supplier_pos != supplier_pos:
            lg.err('known external supplier %r position %d is not matching to received list files position %d for customer %s' % (
                external_supplier_idurl, known_supplier_pos,  supplier_pos, trusted_customer_idurl))
        # TODO: we should remove the block below because we do not need it
        #     service_customer_family() should take care of suppliers list for trusted customer
        #     so we need to just read that list from DHT
        #     contactsdb.erase_supplier(
        #         idurl=external_supplier_idurl,
        #         customer_idurl=trusted_customer_idurl,
        #     )
        # contactsdb.add_supplier(
        #     idurl=external_supplier_idurl,
        #     position=supplier_pos,
        #     customer_idurl=trusted_customer_idurl,
        # )
        # contactsdb.save_suppliers(customer_idurl=trusted_customer_idurl)
    else:
        lg.warn('not possible to detect external supplier position for customer %s from received list files, known position is %s' % (
            trusted_customer_idurl, known_supplier_pos))
        supplier_pos = known_supplier_pos
    remote_files_changed, _, _, _ = backup_matrix.process_raw_list_files(
        supplier_num=supplier_pos,
        list_files_text_body=supplier_raw_list_files,
        customer_idurl=trusted_customer_idurl,
        is_in_sync=True,
        auto_create=True,
    )
    if remote_files_changed:
        backup_matrix.SaveLatestRawListFiles(
            supplier_idurl=external_supplier_idurl,
            raw_data=supplier_raw_list_files,
            customer_idurl=trusted_customer_idurl,
        )
    # finally sending Ack() packet back
    p2p_service.SendAck(newpacket)
    if remote_files_changed:
        lg.info('received updated list of files from external supplier %s for customer %s' % (external_supplier_idurl, trusted_customer_idurl))
    return True