Example No. 1
def backup_outgoing_message(private_message_object, message_id):
    """
    """
    if not driver.is_on('service_backups'):
        lg.warn('service_backups is not started')
        return False
    serialized_message = private_message_object.serialize()
    local_msg_folder = os.path.join(settings.ChatChannelsDir(),
                                    private_message_object.recipient, 'out')
    if not bpio._dir_exist(local_msg_folder):
        bpio._dirs_make(local_msg_folder)
    local_msg_filename = os.path.join(local_msg_folder, message_id)
    if not bpio.WriteBinaryFile(local_msg_filename, serialized_message):
        lg.warn('failed writing outgoing message locally')
        return False
    remote_path_for_message = os.path.join('.messages', 'out',
                                           private_message_object.recipient,
                                           message_id)
    global_message_path = global_id.MakeGlobalID(customer=messages_key_id(),
                                                 path=remote_path_for_message)
    res = api.file_create(global_message_path)
    if res['status'] != 'OK':
        lg.warn('failed to create path "%s" in the catalog: %s' %
                (global_message_path, res['errors']))
        return False
    res = api.file_upload_start(local_msg_filename,
                                global_message_path,
                                wait_result=False)
    if res['status'] != 'OK':
        lg.warn('failed to upload message "%s": %s' %
                (global_message_path, res['errors']))
        return False
    return True
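
All of the examples on this page lean on bpio.WriteBinaryFile reporting failure through its return value instead of raising. A minimal stand-in with the same contract, using only the standard library, might look like the sketch below; the name write_binary_file and the temp-file-then-replace strategy are assumptions for illustration, not the actual bpio implementation.

import os
import tempfile

def write_binary_file(path, data):
    # Hypothetical stand-in for bpio.WriteBinaryFile: returns True on success,
    # False on any error, and never raises to the caller.
    try:
        dirpath = os.path.dirname(path) or '.'
        fd, tmp_path = tempfile.mkstemp(dir=dirpath)
        try:
            with os.fdopen(fd, 'wb') as f:
                f.write(data)
            os.replace(tmp_path, path)  # swap the finished file into place
        finally:
            if os.path.exists(tmp_path):
                os.remove(tmp_path)  # clean up if the replace never happened
        return True
    except Exception:
        return False

With that contract, "if not bpio.WriteBinaryFile(local_msg_filename, serialized_message):" above only needs to log a warning and return False, which is how every example below handles a failed write as well.
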
Example No. 2
 def doSavePacket(self, *args, **kwargs):
     """
     Action method.
     """
     if not args or not args[0]:
         raise Exception('no input found')
     NewPacket, PacketID = args[0]
     glob_path = global_id.ParseGlobalID(PacketID, detect_version=True)
     packetID = global_id.CanonicalID(PacketID)
     customer_id, _, _, _, SupplierNumber, dataORparity = packetid.SplitFull(packetID)
     if dataORparity == 'Data':
         self.OnHandData[SupplierNumber] = True
     elif dataORparity == 'Parity':
         self.OnHandParity[SupplierNumber] = True
     if not NewPacket:
         lg.warn('packet %r already exists locally' % packetID)
         return
     filename = os.path.join(settings.getLocalBackupsDir(), customer_id, glob_path['path'])
     dirpath = os.path.dirname(filename)
     if not os.path.exists(dirpath):
         try:
             bpio._dirs_make(dirpath)
         except:
             lg.exc()
     # either way, the payload of the packet is saved
     if not bpio.WriteBinaryFile(filename, NewPacket.Payload):
         lg.err("unable to write to %s" % filename)
         return
     if self.packetInCallback is not None:
         self.packetInCallback(self.backup_id, NewPacket)
     if _Debug:
         lg.out(_DebugLevel, "restore_worker.doSavePacket %s saved to %s" % (packetID, filename))
Example No. 3
def backup_done(bid, result):
    from crypt import signed
    try:
        os.mkdir(os.path.join(settings.getLocalBackupsDir(), bid + '.out'))
    except:
        pass
    for filename in os.listdir(os.path.join(settings.getLocalBackupsDir(),
                                            bid)):
        filepath = os.path.join(settings.getLocalBackupsDir(), bid, filename)
        payld = bpio.ReadBinaryFile(filepath)  # keep raw bytes; wrapping in str() corrupts binary payloads on Python 3
        outpacket = signed.Packet('Data', my_id.getLocalID(),
                                  my_id.getLocalID(), filename, payld,
                                  'http://megafaq.ru/cvps1010.xml')
        newfilepath = os.path.join(settings.getLocalBackupsDir(), bid + '.out',
                                   filename)
        bpio.WriteBinaryFile(newfilepath, outpacket.Serialize())
    # Assume we delivered all pieces from ".out" to suppliers and lost original data
    # Then we requested the data back and got it into ".inp"
    try:
        os.mkdir(os.path.join(settings.getLocalBackupsDir(), bid + '.inp'))
    except:
        pass
    for filename in os.listdir(
            os.path.join(settings.getLocalBackupsDir(), bid + '.out')):
        filepath = os.path.join(settings.getLocalBackupsDir(), bid + '.out',
                                filename)
        data = bpio.ReadBinaryFile(filepath)
        inppacket = signed.Unserialize(data)
        assert inppacket
        assert inppacket.Valid()
        newfilepath = os.path.join(settings.getLocalBackupsDir(), bid + '.inp',
                                   filename)
        bpio.WriteBinaryFile(newfilepath, inppacket.Payload)
    # Now do restore from input data
    backupID = bid + '.inp'
    outfd, tarfilename = tmpfile.make(
        'restore',
        extension='.tar.gz',
        prefix=backupID.replace('/', '_') + '_',
    )
    r = restore_worker.RestoreWorker(backupID, outfd)
    r.MyDeferred.addBoth(restore_done, tarfilename)
    reactor.callLater(1, r.automat, 'init')
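
The tail of this example uses two Twisted idioms: attaching a callback to a Deferred together with an extra positional argument (MyDeferred.addBoth(restore_done, tarfilename)) and scheduling a call on the reactor with callLater. A self-contained sketch of that pattern, with a fake worker and a made-up file name standing in for RestoreWorker and the temporary tar file:

from twisted.internet import reactor, defer

def start_fake_worker():
    # stand-in for RestoreWorker: fires its Deferred once the "work" is done
    d = defer.Deferred()
    reactor.callLater(2, d.callback, 'restore-ok')
    return d

def on_done(result, tarfilename):
    # addBoth fires on success or failure and forwards the extra argument,
    # mirroring r.MyDeferred.addBoth(restore_done, tarfilename) above
    print('worker finished:', result, tarfilename)
    reactor.stop()

d = start_fake_worker()
d.addBoth(on_done, 'restore_demo.tar.gz')
reactor.callLater(1, print, 'worker scheduled')  # fire-and-forget, like r.automat('init')
reactor.run()
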
Example No. 4
 def _file_received(self, newpacket, state):
     if state in ['in queue', 'shutdown', 'exist', 'failed']:
         return
     if state != 'received':
         lg.warn("incorrect state [%s] for packet %s" %
                 (str(state), str(newpacket)))
         return
     if not newpacket.Valid():
         # TODO: if we didn't get a valid packet ... re-request it or delete
         # it?
         lg.warn("%s is not a valid packet: %r" %
                 (newpacket.PacketID, newpacket))
         return
     # packetID = newpacket.PacketID
     packetID = global_id.CanonicalID(newpacket.PacketID)
     customer, remotePath = packetid.SplitPacketID(packetID)
     filename = os.path.join(settings.getLocalBackupsDir(), customer,
                             remotePath)
     if os.path.isfile(filename):
         lg.warn("found existed file" + filename)
         self.automat('inbox-data-packet', packetID)
         return
         # try:
         #     os.remove(filename)
         # except:
         #     lg.exc()
     dirname = os.path.dirname(filename)
     if not os.path.exists(dirname):
         try:
             bpio._dirs_make(dirname)
         except:
             lg.out(
                 2,
                 "backup_rebuilder._file_received ERROR can not create sub dir: "
                 + dirname)
             return
     if not bpio.WriteBinaryFile(filename, newpacket.Payload):
         lg.out(2,
                "backup_rebuilder._file_received ERROR writing " + filename)
         return
     from storage import backup_matrix
     backup_matrix.LocalFileReport(packetID)
     lg.out(10, "backup_rebuilder._file_received and wrote to " + filename)
     self.automat('inbox-data-packet', packetID)
Example No. 5
 def _bk_done(bid, result):
     from crypt import signed
     customer, remotePath = packetid.SplitPacketID(bid)
     try:
         os.mkdir(os.path.join(settings.getLocalBackupsDir(), customer, remotePath + '.out'))
     except:
         pass
     for filename in os.listdir(os.path.join(settings.getLocalBackupsDir(), customer, remotePath)):
         filepath = os.path.join(settings.getLocalBackupsDir(), customer, remotePath, filename)
         payld = bpio.ReadBinaryFile(filepath)
         newpacket = signed.Packet(
             'Data',
             my_id.getLocalID(),
             my_id.getLocalID(),
             filename,
             payld,
             'http://megafaq.ru/cvps1010.xml')
         newfilepath = os.path.join(settings.getLocalBackupsDir(), customer, remotePath + '.out', filename)
         bpio.WriteBinaryFile(newfilepath, newpacket.Serialize())
     reactor.stop()
Example No. 6
def rewrite_indexes(db_instance, source_db_instance):
    """
    """
    if _Debug:
        lg.out(_DebugLevel, 'coins_db.rewrite_indexes')
    source_location = os.path.join(source_db_instance.path, '_indexes')
    source_indexes = os.listdir(source_location)
    existing_location = os.path.join(db_instance.path, '_indexes')
    existing_indexes = os.listdir(existing_location)
    for existing_index_file in existing_indexes:
        if existing_index_file != '00id.py':
            index_name = existing_index_file[2:existing_index_file.index('.')]
            existing_index_path = os.path.join(existing_location,
                                               existing_index_file)
            os.remove(existing_index_path)
            if _Debug:
                lg.out(_DebugLevel,
                       '        removed index at %s' % existing_index_path)
            buck_path = os.path.join(db_instance.path, index_name + '_buck')
            if os.path.isfile(buck_path):
                os.remove(buck_path)
                if _Debug:
                    lg.out(_DebugLevel,
                           '            also bucket at %s' % buck_path)
            stor_path = os.path.join(db_instance.path, index_name + '_stor')
            if os.path.isfile(stor_path):
                os.remove(stor_path)
                if _Debug:
                    lg.out(_DebugLevel,
                           '            also storage at %s' % stor_path)
    for source_index_file in source_indexes:
        if source_index_file != '00id.py':
            index_name = source_index_file[2:source_index_file.index('.')]
            destination_index_path = os.path.join(existing_location,
                                                  source_index_file)
            source_index_path = os.path.join(source_location,
                                             source_index_file)
            if not bpio.WriteTextFile(destination_index_path,
                                      bpio.ReadTextFile(source_index_path)):
                lg.warn('failed writing index to %s' % destination_index_path)
                continue
            destination_buck_path = os.path.join(db_instance.path,
                                                 index_name + '_buck')
            source_buck_path = os.path.join(source_db_instance.path,
                                            index_name + '_buck')
            if not bpio.WriteBinaryFile(destination_buck_path,
                                        bpio.ReadBinaryFile(source_buck_path)):
                lg.warn('failed writing index bucket to %s' %
                        destination_buck_path)
                continue
            destination_stor_path = os.path.join(db_instance.path,
                                                 index_name + '_stor')
            source_stor_path = os.path.join(source_db_instance.path,
                                            index_name + '_stor')
            if not bpio.WriteBinaryFile(destination_stor_path,
                                        bpio.ReadBinaryFile(source_stor_path)):
                lg.warn('failed writing index storage to %s' %
                        destination_stor_path)
                continue
            if _Debug:
                lg.out(
                    _DebugLevel, '        wrote index %s from %s' %
                    (index_name, source_index_path))
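
The slicing existing_index_file[2:existing_index_file.index('.')] assumes the on-disk layout of the embedded database (it matches the CodernityDB convention): every file in "_indexes" carries a two-character ordinal prefix, then the index name, then a ".py" extension, with companion <name>_buck and <name>_stor files sitting next to the database itself. A quick illustration with a hypothetical file name:

existing_index_file = '01coins_by_time.py'   # hypothetical entry inside "_indexes"
index_name = existing_index_file[2:existing_index_file.index('.')]
print(index_name)             # -> coins_by_time
print(index_name + '_buck')   # the bucket file removed and re-copied above
print(index_name + '_stor')   # the storage file removed and re-copied above
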
Example No. 7
def on_data(newpacket):
    if id_url.to_bin(newpacket.OwnerID) == my_id.getIDURL().to_bin():
        # this Data belong to us, SKIP
        return False


#     if not contactsdb.is_customer(newpacket.OwnerID):
#         # SECURITY
#         # TODO: process files from another customer : glob_path['idurl']
#         lg.warn("skip, %s not a customer, packetID=%s" % (newpacket.OwnerID, newpacket.PacketID))
#         # p2p_service.SendFail(newpacket, 'not a customer')
#         return False
    glob_path = global_id.ParseGlobalID(newpacket.PacketID)
    if not glob_path['path']:
        # backward compatible check
        glob_path = global_id.ParseGlobalID(
            my_id.getGlobalID('master') + ':' + newpacket.PacketID)
    if not glob_path['path']:
        lg.err("got incorrect PacketID")
        # p2p_service.SendFail(newpacket, 'incorrect path')
        return False
    authorized_idurl = verify_packet_ownership(newpacket)
    if authorized_idurl is None:
        lg.err("ownership verification failed for %r" % newpacket)
        # p2p_service.SendFail(newpacket, 'ownership verification failed')
        return False
    filename = make_valid_filename(newpacket.OwnerID, glob_path)
    if not filename:
        lg.warn("got empty filename, bad customer or wrong packetID?")
        # p2p_service.SendFail(newpacket, 'empty filename')
        return False
    dirname = os.path.dirname(filename)
    if not os.path.exists(dirname):
        try:
            bpio._dirs_make(dirname)
        except:
            lg.err("can not create sub dir %s" % dirname)
            p2p_service.SendFail(newpacket,
                                 'write error',
                                 remote_idurl=authorized_idurl)
            return False
    data = newpacket.Serialize()
    donated_bytes = settings.getDonatedBytes()
    accounting.check_create_customers_quotas(donated_bytes)
    space_dict, _ = accounting.read_customers_quotas()
    if newpacket.OwnerID.to_bin() not in list(space_dict.keys()):
        lg.err("customer space is broken, no info about donated space for %s" %
               newpacket.OwnerID)
        p2p_service.SendFail(
            newpacket,
            'customer space is broken, no info about donated space',
            remote_idurl=authorized_idurl)
        return False
    used_space_dict = accounting.read_customers_usage()
    if newpacket.OwnerID.to_bin() in list(used_space_dict.keys()):
        try:
            bytes_used_by_customer = int(
                used_space_dict[newpacket.OwnerID.to_bin()])
            bytes_donated_to_customer = int(
                space_dict[newpacket.OwnerID.to_bin()])
            if bytes_donated_to_customer - bytes_used_by_customer < len(data):
                lg.warn("no free space left for customer data: %s" %
                        newpacket.OwnerID)
                p2p_service.SendFail(newpacket,
                                     'no free space left for customer data',
                                     remote_idurl=authorized_idurl)
                return False
        except:
            lg.exc()
    if not bpio.WriteBinaryFile(filename, data):
        lg.err("can not write to %s" % str(filename))
        p2p_service.SendFail(newpacket,
                             'write error',
                             remote_idurl=authorized_idurl)
        return False
    # Here Data() packet was stored as it is on supplier node (current machine)
    del data
    p2p_service.SendAck(newpacket,
                        response=strng.to_text(len(newpacket.Payload)),
                        remote_idurl=authorized_idurl)
    reactor.callLater(0, local_tester.TestSpaceTime)  # @UndefinedVariable
    #     if self.publish_event_supplier_file_modified:  #  TODO: must remove that actually
    #         from main import events
    #         events.send('supplier-file-modified', data=dict(
    #             action='write',
    #             glob_path=glob_path['path'],
    #             owner_id=newpacket.OwnerID,
    #         ))
    return True
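
The free-space check in the middle of on_data is plain arithmetic over the two per-customer dictionaries: the packet is rejected when the donated quota minus the bytes already used cannot hold the serialized packet. A sketch with made-up numbers:

bytes_donated_to_customer = 8 * 1024 * 1024   # value read from space_dict for this OwnerID
bytes_used_by_customer = 7 * 1024 * 1024      # value read from used_space_dict for this OwnerID
data = b'x' * (2 * 1024 * 1024)               # stands in for newpacket.Serialize()

if bytes_donated_to_customer - bytes_used_by_customer < len(data):
    print('no free space left for customer data')   # on_data would SendFail and return False
else:
    print('enough room, write the packet to disk')
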
Example No. 8
 def _on_data(self, newpacket):
     import os
     from twisted.internet import reactor  # @UnresolvedImport
     from logs import lg
     from lib import jsn
     from system import bpio
     from main import settings
     from userid import my_id
     from userid import global_id
     from contacts import contactsdb
     from p2p import p2p_service
     from storage import accounting
     if newpacket.OwnerID == my_id.getLocalID():
         # this Data belong to us, SKIP
         return False
     if not contactsdb.is_customer(newpacket.OwnerID):
         # SECURITY
         # TODO: process files from another customer : glob_path['idurl']
         lg.warn("skip, %s not a customer, packetID=%s" %
                 (newpacket.OwnerID, newpacket.PacketID))
         # p2p_service.SendFail(newpacket, 'not a customer')
         return False
     glob_path = global_id.ParseGlobalID(newpacket.PacketID)
     if not glob_path['path']:
         # backward compatible check
         glob_path = global_id.ParseGlobalID(
             my_id.getGlobalID('master') + ':' + newpacket.PacketID)
     if not glob_path['path']:
         lg.err("got incorrect PacketID")
         p2p_service.SendFail(newpacket, 'incorrect path')
         return False
     filename = self._do_make_valid_filename(newpacket.OwnerID, glob_path)
     if not filename:
         lg.warn("got empty filename, bad customer or wrong packetID?")
         p2p_service.SendFail(newpacket, 'empty filename')
         return False
     dirname = os.path.dirname(filename)
     if not os.path.exists(dirname):
         try:
             bpio._dirs_make(dirname)
         except:
             lg.err("can not create sub dir %s" % dirname)
             p2p_service.SendFail(newpacket, 'write error')
             return False
     data = newpacket.Serialize()
     donated_bytes = settings.getDonatedBytes()
     accounting.check_create_customers_quotas(donated_bytes)
     space_dict = accounting.read_customers_quotas()
     if newpacket.OwnerID not in list(space_dict.keys()):
         lg.err("no info about donated space for %s" % newpacket.OwnerID)
         p2p_service.SendFail(newpacket, 'no info about donated space')
         return False
     used_space_dict = accounting.read_customers_usage()
     if newpacket.OwnerID in list(used_space_dict.keys()):
         try:
             bytes_used_by_customer = int(
                 used_space_dict[newpacket.OwnerID])
             bytes_donated_to_customer = int(space_dict[newpacket.OwnerID])
             if bytes_donated_to_customer - bytes_used_by_customer < len(
                     data):
                 lg.warn("no free space for %s" % newpacket.OwnerID)
                 p2p_service.SendFail(newpacket, 'no free space')
                 return False
         except:
             lg.exc()
     if not bpio.WriteBinaryFile(filename, data):
         lg.err("can not write to %s" % str(filename))
         p2p_service.SendFail(newpacket, 'write error')
         return False
     # Here Data() packet was stored as it is on supplier node (current machine)
     sz = len(data)
     del data
     lg.out(self.debug_level, "service_supplier._on_data %r" % newpacket)
     lg.out(
         self.debug_level, "    from [ %s | %s ]" % (
             newpacket.OwnerID,
             newpacket.CreatorID,
         ))
     lg.out(self.debug_level, "        saved with %d bytes to %s" % (
         sz,
         filename,
     ))
     p2p_service.SendAck(newpacket, str(len(newpacket.Payload)))
     from supplier import local_tester
     reactor.callLater(0, local_tester.TestSpaceTime)  # @UndefinedVariable
     if self.publish_event_supplier_file_modified:
         from main import events
         events.send('supplier-file-modified',
                     data=dict(
                         action='write',
                         glob_path=glob_path['path'],
                         owner_id=newpacket.OwnerID,
                     ))
     return True