def load_groups():
    """
    Load known groups and message brokers from the local disk cache.

    Scans the "groups" and "brokers" sub folders of the
    `service_private_groups` service directory and populates the in-memory
    registries returned by `known_groups()` and `known_brokers()`.
    Both folders are created first if they do not exist yet.
    """
    service_dir = settings.ServiceDir('service_private_groups')
    groups_dir = os.path.join(service_dir, 'groups')
    if not os.path.isdir(groups_dir):
        bpio._dirs_make(groups_dir)
    brokers_dir = os.path.join(service_dir, 'brokers')
    if not os.path.isdir(brokers_dir):
        bpio._dirs_make(brokers_dir)
    for group_key_id in os.listdir(groups_dir):
        if group_key_id not in known_groups():
            # start from a safe default in case the info file is unreadable
            known_groups()[group_key_id] = {
                'last_sequence_id': -1,
                'active': False,
                'archive_folder_path': None,
            }
        group_path = os.path.join(groups_dir, group_key_id)
        group_info = jsn.loads_text(local_fs.ReadTextFile(group_path))
        if group_info:
            known_groups()[group_key_id] = group_info
    for customer_id in os.listdir(brokers_dir):
        customer_path = os.path.join(brokers_dir, customer_id)
        for broker_id in os.listdir(customer_path):
            if customer_id not in known_brokers():
                known_brokers()[customer_id] = [None, ] * REQUIRED_BROKERS_COUNT
            if broker_id in known_brokers(customer_id):
                lg.warn('broker %r already exist' % broker_id)
                continue
            broker_path = os.path.join(customer_path, broker_id)
            broker_info = jsn.loads_text(local_fs.ReadTextFile(broker_path))
            # BUGFIX: guard against an empty or corrupted broker info file -
            # previously int(broker_info['position']) raised TypeError/KeyError
            # here and aborted loading of all remaining brokers
            if not broker_info or 'position' not in broker_info:
                lg.warn('skipping broker %r, broken info file %r' % (broker_id, broker_path))
                continue
            try:
                known_brokers()[customer_id][int(broker_info['position'])] = broker_id
            except (ValueError, IndexError):
                # non-numeric or out-of-range position stored in the file
                lg.warn('skipping broker %r, invalid position in %r' % (broker_id, broker_path))
def backup_outgoing_message(private_message_object, message_id):
    """
    Store a copy of an outgoing private message locally and start uploading
    it to the remote '.messages' catalog.

    Returns True on success, False when the backups service is off or when
    any of the local-write / catalog-create / upload steps fail.
    """
    if not driver.is_on('service_backups'):
        lg.warn('service_backups is not started')
        return False
    raw_payload = private_message_object.serialize()
    out_folder = os.path.join(settings.ChatChannelsDir(), private_message_object.recipient, 'out')
    if not bpio._dir_exist(out_folder):
        bpio._dirs_make(out_folder)
    out_filename = os.path.join(out_folder, message_id)
    if not bpio.WriteBinaryFile(out_filename, raw_payload):
        lg.warn('failed writing outgoing message locally')
        return False
    # remote catalog location mirrors the local "out" folder layout
    remote_path = os.path.join('.messages', 'out', private_message_object.recipient, message_id)
    catalog_path = global_id.MakeGlobalID(customer=messages_key_id(), path=remote_path)
    result = api.file_create(catalog_path)
    if result['status'] != 'OK':
        lg.warn('failed to create path "%s" in the catalog: %s' % (catalog_path, result['errors']))
        return False
    result = api.file_upload_start(out_filename, catalog_path, wait_result=False)
    if result['status'] != 'OK':
        lg.warn('failed to upload message "%s": %s' % (catalog_path, result['errors']))
        return False
    return True
def _on_restore_done(self, result, backupID, outfd, tarfilename):
    """
    Callback fired when the archive restore finished.

    Closes the output file descriptor; on success extracts the downloaded
    tar file into the group queue snapshot folder and returns the extraction
    Deferred, otherwise throws the temp file away and returns None.
    """
    try:
        os.close(outfd)
    except:
        lg.exc()
    if result != 'done':
        lg.err('archive %r restore failed from %r with : %r' % (
            backupID,
            tarfilename,
            result,
        ))
        tmpfile.throw_out(tarfilename, 'restore ' + result)
        return None
    lg.info('archive %r restore success from %r' % (
        backupID,
        tarfilename,
    ))
    _, pathID, versionName = packetid.SplitBackupID(backupID)
    snapshot_dir = os.path.join(
        settings.ServiceDir('service_private_groups'),
        'queues',
        self.group_key_id,
        pathID,
        versionName,
    )
    if not os.path.isdir(snapshot_dir):
        bpio._dirs_make(snapshot_dir)
    d = backup_tar.extracttar_thread(tarfilename, snapshot_dir)
    d.addCallback(self._on_extract_done, backupID, tarfilename, snapshot_dir)
    d.addErrback(self._on_extract_failed, backupID, tarfilename, snapshot_dir)
    return d
def doSavePacket(self, *args, **kwargs):
    """
    Action method.

    Expects args[0] to be a (NewPacket, PacketID) tuple. Marks the Data or
    Parity piece as "on hand" for the supplier it came from, then writes the
    packet payload into the local backups folder. When NewPacket is empty the
    piece is assumed to already exist locally and nothing is written.
    """
    if not args or not args[0]:
        raise Exception('no input found')
    NewPacket, PacketID = args[0]
    glob_path = global_id.ParseGlobalID(PacketID, detect_version=True)
    packetID = global_id.CanonicalID(PacketID)
    # SplitFull() yields supplier number and Data/Parity marker from the ID
    customer_id, _, _, _, SupplierNumber, dataORparity = packetid.SplitFull(packetID)
    if dataORparity == 'Data':
        self.OnHandData[SupplierNumber] = True
    elif dataORparity == 'Parity':
        self.OnHandParity[SupplierNumber] = True
    if not NewPacket:
        # on-hand flags above were still updated for the known piece
        lg.warn('packet %r already exists locally' % packetID)
        return
    filename = os.path.join(settings.getLocalBackupsDir(), customer_id, glob_path['path'])
    dirpath = os.path.dirname(filename)
    if not os.path.exists(dirpath):
        try:
            bpio._dirs_make(dirpath)
        except:
            lg.exc()
    # either way the payload of packet is saved
    if not bpio.WriteBinaryFile(filename, NewPacket.Payload):
        lg.err("unable to write to %s" % filename)
        return
    if self.packetInCallback is not None:
        # notify subscriber about the received piece
        self.packetInCallback(self.backup_id, NewPacket)
    if _Debug:
        lg.out(_DebugLevel, "restore_worker.doSavePacket %s saved to %s" % (packetID, filename))
def save_suppliers(path=None, customer_idurl=None):
    """
    Write the current suppliers list of the given customer to disk.

    ``path`` is the destination file; when not given a default location
    under the suppliers directory is used. When ``customer_idurl`` is not
    given the local identity is assumed. Always returns True.
    """
    if not customer_idurl:
        customer_idurl = my_id.getLocalID()
    customer_idurl = strng.to_bin(customer_idurl.strip())
    customer_id = global_id.UrlToGlobalID(customer_idurl)
    if path is None:
        path = os.path.join(settings.SuppliersDir(), customer_id, 'supplierids')
    items = [strng.to_text(one_supplier) for one_supplier in suppliers(customer_idurl=customer_idurl)]
    parent_dir = os.path.dirname(path)
    if not os.path.exists(parent_dir):
        bpio._dirs_make(parent_dir)
    bpio._write_list(path, items)
    if _Debug:
        lg.out(
            _DebugLevel,
            'contactsdb.save_suppliers for customer [%s]:\n%r' % (
                customer_id,
                items,
            ))
    return True
def doSavePacket(self, NewPacket):
    """
    Store one received Data/Parity packet of the backup on local disk and
    mark the piece as "on hand" for the supplier it came from.
    """
    glob_path = global_id.ParseGlobalID(NewPacket.PacketID, detect_version=True)
    packetID = global_id.CanonicalID(NewPacket.PacketID)
    customer_id, _, _, _, SupplierNumber, dataORparity = packetid.SplitFull(
        packetID)
    if dataORparity == 'Data':
        self.OnHandData[SupplierNumber] = True
    elif dataORparity == 'Parity':
        # CONSISTENCY FIX: previously this branch tested NewPacket.DataOrParity()
        # while the 'Data' branch used the marker parsed from the packet ID -
        # both branches now rely on the same source (matches the newer
        # restore_worker.doSavePacket implementation)
        self.OnHandParity[SupplierNumber] = True
    filename = os.path.join(settings.getLocalBackupsDir(), customer_id, glob_path['path'])
    dirpath = os.path.dirname(filename)
    if not os.path.exists(dirpath):
        try:
            bpio._dirs_make(dirpath)
        except:
            lg.exc()
    # either way the payload of packet is saved
    if not bpio.WriteFile(filename, NewPacket.Payload):
        lg.warn("unable to write to %s" % filename)
        return
    if self.packetInCallback is not None:
        self.packetInCallback(self.BackupID, NewPacket)
    lg.out(6, "restore.doSavePacket %s saved to %s" % (packetID, filename))
def save_group_info(group_key_id):
    """
    Persist the in-memory info of a known group as a JSON text file.

    Returns False when the group is not known, otherwise the boolean
    result of the file write.
    """
    if not is_group_exist(group_key_id):
        lg.warn('group %r is not known' % group_key_id)
        return False
    group_info = known_groups()[group_key_id]
    groups_dir = os.path.join(settings.ServiceDir('service_private_groups'), 'groups')
    if not os.path.isdir(groups_dir):
        bpio._dirs_make(groups_dir)
    group_info_path = os.path.join(groups_dir, group_key_id)
    saved_ok = local_fs.WriteTextFile(group_info_path, jsn.dumps(group_info))
    if _Debug:
        lg.args(_DebugLevel, group_key_id=group_key_id, group_info_path=group_info_path, ret=saved_ok)
    return saved_ok
def _file_received(self, newpacket, state):
    """
    Handle the result of one requested file transfer during rebuilding.

    Only the 'received' state is processed: the packet is validated and its
    payload written into the local backups folder, then the local file
    report is updated and the automat is notified with 'inbox-data-packet'.
    """
    if state in ['in queue', 'shutdown', 'exist', 'failed']:
        # nothing to store for these outcomes
        return
    if state != 'received':
        lg.warn("incorrect state [%s] for packet %s" % (str(state), str(newpacket)))
        return
    if not newpacket.Valid():
        # TODO: if we didn't get a valid packet ... re-request it or delete
        # it?
        lg.warn("%s is not a valid packet: %r" % (newpacket.PacketID, newpacket))
        return
    # packetID = newpacket.PacketID
    packetID = global_id.CanonicalID(newpacket.PacketID)
    customer, remotePath = packetid.SplitPacketID(packetID)
    filename = os.path.join(settings.getLocalBackupsDir(), customer, remotePath)
    if os.path.isfile(filename):
        # already have the file locally - still feed the automat
        lg.warn("found existed file" + filename)
        self.automat('inbox-data-packet', packetID)
        return
        # try:
        #     os.remove(filename)
        # except:
        #     lg.exc()
    dirname = os.path.dirname(filename)
    if not os.path.exists(dirname):
        try:
            bpio._dirs_make(dirname)
        except:
            lg.out(
                2,
                "backup_rebuilder._file_received ERROR can not create sub dir " + dirname)
            return
    if not bpio.WriteFile(filename, newpacket.Payload):
        lg.out(2, "backup_rebuilder._file_received ERROR writing " + filename)
        return
    # record the freshly stored piece in the local backup matrix
    from storage import backup_matrix
    backup_matrix.LocalFileReport(packetID)
    lg.out(10, "backup_rebuilder._file_received and wrote to " + filename)
    self.automat('inbox-data-packet', packetID)
def set_broker(customer_id, broker_id, position=0):
    """
    Register a message broker for the given customer at the given position.

    Writes a small JSON info file on disk and updates the in-memory
    registry. Returns True on success, False when the file write fails.
    """
    base_dir = os.path.join(settings.ServiceDir('service_private_groups'), 'brokers')
    customer_dir = os.path.join(base_dir, customer_id)
    broker_path = os.path.join(customer_dir, broker_id)
    if os.path.isfile(broker_path):
        lg.warn('broker %r already exist for customer %r, overwriting' % (broker_id, customer_id, ))
    if not os.path.isdir(customer_dir):
        bpio._dirs_make(customer_dir)
    broker_info = {'position': position, }
    if not local_fs.WriteTextFile(broker_path, jsn.dumps(broker_info)):
        lg.err('failed to set broker %r at position %d for customer %r' % (broker_id, position, customer_id, ))
        return False
    known_brokers(customer_id)[position] = broker_id
    if _Debug:
        lg.args(_DebugLevel, customer_id=customer_id, broker_id=broker_id, broker_info=broker_info)
    return True
def refresh_indexes(db_instance, rewrite=True, reindex=True):
    """
    Create or update all message database indexes defined in message_index.

    File-based index definitions (str) are copied from the application's
    'chat/indexes' folder into the database '_indexes' folder first; class
    definitions are instantiated directly. Returns True when all processed
    successfully, False when rewriting an existing index failed.
    """
    if _Debug:
        lg.out(_DebugLevel, 'message_db.refresh_indexes in %s' % db_instance.path)
    ok = True
    for ind, ind_class_or_filename in message_index.definitions():
        if isinstance(ind_class_or_filename, str):
            # definition given as a filename: refresh the index source file
            # inside the database folder from the bundled copy
            chat_history_dir = db_instance.path
            target_index_filepath = os.path.join(chat_history_dir, '_indexes', ind_class_or_filename)
            if not os.path.exists(os.path.dirname(target_index_filepath)):
                bpio._dirs_make(os.path.dirname(target_index_filepath))
            bpio.WriteTextFile(
                target_index_filepath,
                bpio.ReadTextFile(os.path.join(bpio.getExecutableDir(), 'chat', 'indexes', ind_class_or_filename)),
            )
            ind_obj = 'path:%s' % os.path.basename(target_index_filepath)
        else:
            ind_obj = ind_class_or_filename(db_instance.path, ind)
        if ind not in db_instance.indexes_names:
            try:
                db_instance.add_index(ind_obj, create=True)
                if _Debug:
                    lg.out(_DebugLevel, ' added index %s' % ind)
            except:
                # NOTE(review): a failed add does NOT set ok=False and the loop
                # continues, while a failed edit below aborts with ok=False -
                # presumably intentional (best effort on first creation); confirm
                lg.exc('failed adding index "%r"' % ind)
        else:
            if rewrite:
                try:
                    db_instance.edit_index(ind_obj, reindex=reindex)
                    if _Debug:
                        lg.out(_DebugLevel, ' updated index %s' % ind)
                except:
                    lg.exc('failed rewriting index "%r"' % ind)
                    ok = False
                    break
    return ok
def set_broker(customer_id, broker_id, position=0):
    """
    Register a message broker for the given customer at the given position.

    Both IDs are first normalized to their latest rotated form. An existing
    broker at the same position is replaced: its info file is removed before
    the new one is written. Returns True on success (including the no-op case
    where the same broker is already set), False on remove/write failures.
    """
    customer_id = global_id.latest_glob_id(customer_id)
    broker_id = global_id.latest_glob_id(broker_id)
    service_dir = settings.ServiceDir('service_private_groups')
    brokers_dir = os.path.join(service_dir, 'brokers')
    customer_dir = os.path.join(brokers_dir, customer_id)
    broker_path = os.path.join(customer_dir, broker_id)
    if not os.path.isdir(customer_dir):
        bpio._dirs_make(customer_dir)
    if os.path.isfile(broker_path):
        if _Debug:
            lg.dbg(_DebugLevel, 'broker %r already exist for customer %r, overwriting' % (broker_id, customer_id, ))
    broker_info = {
        'position': position,
    }
    prev_borker_id = known_brokers(customer_id)[position]
    if prev_borker_id:
        if prev_borker_id == broker_id:
            # same broker already registered at that position - nothing to do
            if _Debug:
                lg.args(_DebugLevel, customer_id=customer_id, position=position, broker_id=broker_id, prev_borker_id=prev_borker_id)
            return True
        prev_broker_path = os.path.join(customer_dir, prev_borker_id)
        if os.path.isfile(prev_broker_path):
            lg.info('replacing existing broker for customer %r at position %d : %r -> %r' % (
                customer_id, position, prev_borker_id, broker_id, ))
            try:
                os.remove(prev_broker_path)
            except:
                # failed to clean up the old broker file - abort the change
                lg.exc()
                return False
    if not local_fs.WriteTextFile(broker_path, jsn.dumps(broker_info)):
        lg.err('failed to set broker %r at position %d for customer %r' % (broker_id, position, customer_id, ))
        return False
    known_brokers(customer_id)[position] = broker_id
    if _Debug:
        lg.args(_DebugLevel, customer_id=customer_id, broker_id=broker_id, broker_info=broker_info)
    return True
def _on_data(self, newpacket):
    """
    Store an incoming customer Data() packet on this supplier node.

    Verifies the sender is a known customer, resolves the target filename
    from the packet's global ID, checks the customer's donated-space quota
    and writes the serialized packet to the local donated storage. Replies
    with Ack on success or Fail otherwise. Returns True when the data was
    stored, False in every other case.
    """
    import os
    from twisted.internet import reactor  # @UnresolvedImport
    from logs import lg
    from lib import jsn
    from system import bpio
    from main import settings
    from userid import my_id
    from userid import global_id
    from contacts import contactsdb
    from p2p import p2p_service
    from storage import accounting
    if newpacket.OwnerID == my_id.getLocalID():
        # this Data belong to us, SKIP
        return False
    if not contactsdb.is_customer(newpacket.OwnerID):
        # SECURITY
        # TODO: process files from another customer : glob_path['idurl']
        lg.warn("skip, %s not a customer, packetID=%s" % (newpacket.OwnerID, newpacket.PacketID))
        # p2p_service.SendFail(newpacket, 'not a customer')
        return False
    glob_path = global_id.ParseGlobalID(newpacket.PacketID)
    if not glob_path['path']:
        # backward compatible check
        glob_path = global_id.ParseGlobalID(
            my_id.getGlobalID('master') + ':' + newpacket.PacketID)
    if not glob_path['path']:
        lg.err("got incorrect PacketID")
        p2p_service.SendFail(newpacket, 'incorrect path')
        return False
    filename = self._do_make_valid_filename(newpacket.OwnerID, glob_path)
    if not filename:
        lg.warn("got empty filename, bad customer or wrong packetID?")
        p2p_service.SendFail(newpacket, 'empty filename')
        return False
    dirname = os.path.dirname(filename)
    if not os.path.exists(dirname):
        try:
            bpio._dirs_make(dirname)
        except:
            lg.err("can not create sub dir %s" % dirname)
            p2p_service.SendFail(newpacket, 'write error')
            return False
    data = newpacket.Serialize()
    donated_bytes = settings.getDonatedBytes()
    # make sure quota records exist before reading them
    accounting.check_create_customers_quotas(donated_bytes)
    space_dict = accounting.read_customers_quotas()
    if newpacket.OwnerID not in list(space_dict.keys()):
        lg.err("no info about donated space for %s" % newpacket.OwnerID)
        p2p_service.SendFail(newpacket, 'no info about donated space')
        return False
    used_space_dict = accounting.read_customers_usage()
    if newpacket.OwnerID in list(used_space_dict.keys()):
        try:
            bytes_used_by_customer = int(
                used_space_dict[newpacket.OwnerID])
            bytes_donated_to_customer = int(space_dict[newpacket.OwnerID])
            if bytes_donated_to_customer - bytes_used_by_customer < len(
                    data):
                lg.warn("no free space for %s" % newpacket.OwnerID)
                p2p_service.SendFail(newpacket, 'no free space')
                return False
        except:
            # broken usage/quota records - proceed with the write anyway
            lg.exc()
    if not bpio.WriteBinaryFile(filename, data):
        lg.err("can not write to %s" % str(filename))
        p2p_service.SendFail(newpacket, 'write error')
        return False
    # Here Data() packet was stored as it is on supplier node (current machine)
    sz = len(data)
    del data
    lg.out(self.debug_level, "service_supplier._on_data %r" % newpacket)
    lg.out(
        self.debug_level, " from [ %s | %s ]" % (
            newpacket.OwnerID,
            newpacket.CreatorID,
        ))
    lg.out(self.debug_level, " saved with %d bytes to %s" % (
        sz,
        filename,
    ))
    p2p_service.SendAck(newpacket, str(len(newpacket.Payload)))
    from supplier import local_tester
    # re-check donated space consumption after the write
    reactor.callLater(0, local_tester.TestSpaceTime)  # @UndefinedVariable
    if self.publish_event_supplier_file_modified:
        from main import events
        events.send('supplier-file-modified', data=dict(
            action='write',
            glob_path=glob_path['path'],
            owner_id=newpacket.OwnerID,
        ))
    return True
def load_groups():
    """
    Load active groups and message brokers from the local disk cache.

    Walks the "groups" and "brokers" sub folders of the
    `service_private_groups` service directory. Files and folders named with
    an outdated (rotated) key/global ID are renamed to their latest form
    first. Loaded records populate `active_groups()` and `known_brokers()`;
    duplicated broker files for the same position are erased.
    """
    loaded_brokers = 0
    loaded_groups = 0
    service_dir = settings.ServiceDir('service_private_groups')
    groups_dir = os.path.join(service_dir, 'groups')
    if not os.path.isdir(groups_dir):
        bpio._dirs_make(groups_dir)
    brokers_dir = os.path.join(service_dir, 'brokers')
    if not os.path.isdir(brokers_dir):
        bpio._dirs_make(brokers_dir)
    for group_key_id in os.listdir(groups_dir):
        latest_group_key_id = my_keys.latest_key_id(group_key_id)
        latest_group_path = os.path.join(groups_dir, latest_group_key_id)
        if latest_group_key_id != group_key_id:
            # group key was rotated: move the info file to the new name
            lg.info('going to rename rotated group key: %r -> %r' % (group_key_id, latest_group_key_id, ))
            old_group_path = os.path.join(groups_dir, group_key_id)
            try:
                os.rename(old_group_path, latest_group_path)
            except:
                lg.exc()
                continue
        latest_group_info = jsn.loads_text(local_fs.ReadTextFile(latest_group_path))
        if not latest_group_info:
            lg.err('was not able to load group info from %r' % latest_group_path)
            continue
        active_groups()[latest_group_key_id] = latest_group_info
        loaded_groups += 1
    for customer_id in os.listdir(brokers_dir):
        latest_customer_id = global_id.latest_glob_id(customer_id)
        latest_customer_dir = os.path.join(brokers_dir, latest_customer_id)
        if latest_customer_id != customer_id:
            # customer ID was rotated: move the whole brokers sub folder
            lg.info('going to rename rotated customer id: %r -> %r' % (customer_id, latest_customer_id, ))
            old_customer_dir = os.path.join(brokers_dir, customer_id)
            try:
                bpio.move_dir_recursive(old_customer_dir, latest_customer_dir)
                bpio.rmdir_recursive(old_customer_dir)
            except:
                lg.exc()
                continue
        for broker_id in os.listdir(latest_customer_dir):
            if latest_customer_id not in known_brokers():
                # calling known_brokers() with an argument initializes the entry
                known_brokers(latest_customer_id)
            latest_broker_id = global_id.latest_glob_id(broker_id)
            latest_broker_path = os.path.join(latest_customer_dir, latest_broker_id)
            if latest_broker_id != broker_id:
                # broker ID was rotated: move the info file to the new name
                lg.info('going to rename rotated broker id: %r -> %r' % (broker_id, latest_broker_id, ))
                old_broker_path = os.path.join(latest_customer_dir, broker_id)
                try:
                    os.rename(old_broker_path, latest_broker_path)
                except:
                    lg.exc()
                    continue
            latest_broker_info = jsn.loads_text(local_fs.ReadTextFile(latest_broker_path))
            if not latest_broker_info:
                lg.err('was not able to load broker info from %r' % latest_broker_path)
                continue
            existing_broker_id = known_brokers(latest_customer_id)[int(latest_broker_info['position'])]
            if existing_broker_id:
                # another broker already occupies that position - drop duplicate
                if os.path.isfile(latest_broker_path):
                    lg.err('found duplicated broker for customer %r on position %d, erasing file %r' % (
                        latest_customer_id, int(latest_broker_info['position']), latest_broker_path, ))
                    try:
                        os.remove(latest_broker_path)
                    except:
                        lg.exc()
                continue
            known_brokers()[latest_customer_id][int(latest_broker_info['position'])] = latest_broker_id
            loaded_brokers += 1
    if _Debug:
        lg.args(_DebugLevel, loaded_groups=loaded_groups, loaded_brokers=loaded_brokers)
def Data(request):
    """
    Handle an incoming Data() packet.

    Two cases: 1) the packet is our own requested data coming back (e.g. the
    backup index from a supplier) - pass it to backup_control; 2) the packet
    is a customer's file to be stored on our donated space - check quota and
    write it to disk, replying Ack or Fail.
    """
    if _Debug:
        lg.out(
            _DebugLevel,
            'p2p_service.Data %d bytes in [%s] by %s | %s' % (len(request.Payload), request.PacketID, request.OwnerID, request.CreatorID))
    # 1. this is our Data!
    if request.OwnerID == my_id.getLocalID():
        if _Debug:
            lg.out(
                _DebugLevel,
                "p2p_service.Data %r for us from %s" % (request, nameurl.GetName(request.RemoteID)))
        if driver.is_on('service_backups'):
            # TODO: move this into callback
            settings.BackupIndexFileName()
            indexPacketID = global_id.MakeGlobalID(
                idurl=my_id.getLocalID(),
                path=settings.BackupIndexFileName())
            if request.PacketID == indexPacketID:
                from storage import backup_control
                backup_control.IncomingSupplierBackupIndex(request)
                return True
        return False
    # 2. this Data is not belong to us
    if not driver.is_on('service_supplier'):
        return SendFail(request, 'supplier service is off')
    if not contactsdb.is_customer(request.OwnerID):
        # SECURITY
        lg.warn("%s not a customer, packetID=%s" % (request.OwnerID, request.PacketID))
        SendFail(request, 'not a customer')
        return
    glob_path = global_id.ParseGlobalID(request.PacketID)
    if not glob_path['path']:
        # backward compatible check
        glob_path = global_id.ParseGlobalID(my_id.getGlobalID() + ':' + request.PacketID)
    if not glob_path['path']:
        lg.warn("got incorrect PacketID")
        SendFail(request, 'incorrect PacketID')
        return
    # TODO: process files from another customer : glob_path['idurl']
    filename = makeFilename(request.OwnerID, glob_path['path'])
    if not filename:
        lg.warn("got empty filename, bad customer or wrong packetID? ")
        SendFail(request, 'empty filename')
        return
    dirname = os.path.dirname(filename)
    if not os.path.exists(dirname):
        try:
            bpio._dirs_make(dirname)
        except:
            lg.warn("ERROR can not create sub dir " + dirname)
            SendFail(request, 'write error')
            return
    data = request.Serialize()
    donated_bytes = settings.getDonatedBytes()
    if not os.path.isfile(settings.CustomersSpaceFile()):
        # first customer write: create the donated space bookkeeping file
        bpio._write_dict(settings.CustomersSpaceFile(), {'free': donated_bytes})
        if _Debug:
            lg.out(_DebugLevel, 'p2p_service.Data created a new space file')
    space_dict = bpio._read_dict(settings.CustomersSpaceFile())
    if request.OwnerID not in space_dict.keys():
        lg.warn("no info about donated space for %s" % request.OwnerID)
        SendFail(request, 'no info about donated space')
        return
    used_space_dict = bpio._read_dict(settings.CustomersUsedSpaceFile(), {})
    if request.OwnerID in used_space_dict.keys():
        try:
            bytes_used_by_customer = int(used_space_dict[request.OwnerID])
            bytes_donated_to_customer = int(space_dict[request.OwnerID])
            if bytes_donated_to_customer - bytes_used_by_customer < len(data):
                lg.warn("no free space for %s" % request.OwnerID)
                SendFail(request, 'no free space')
                return
        except:
            # broken bookkeeping records - proceed with the write anyway
            lg.exc()
    if not bpio.WriteFile(filename, data):
        lg.warn("ERROR can not write to " + str(filename))
        SendFail(request, 'write error')
        return
    SendAck(request, str(len(request.Payload)))
    from supplier import local_tester
    # re-check donated space consumption after the write
    reactor.callLater(0, local_tester.TestSpaceTime)
    del data
    if _Debug:
        lg.out(
            _DebugLevel,
            "p2p_service.Data saved from [%s | %s] to %s" % (
                request.OwnerID,
                request.CreatorID,
                filename,
            ))
def _on_data(self, newpacket):
    """
    Store an incoming customer Data() packet on this supplier node.

    Verifies the sender is a known customer, resolves the target filename,
    checks the customer's donated-space bookkeeping files and writes the
    serialized packet to local donated storage. Replies Ack on success or
    Fail otherwise. Returns True only when the data was stored.
    """
    import os
    from twisted.internet import reactor
    from logs import lg
    from system import bpio
    from main import settings
    from userid import my_id
    from userid import global_id
    from contacts import contactsdb
    from p2p import p2p_service
    if newpacket.OwnerID == my_id.getLocalID():
        # this Data belong to us, SKIP
        return False
    if not contactsdb.is_customer(newpacket.OwnerID):
        # SECURITY
        lg.err("%s not a customer, packetID=%s" % (newpacket.OwnerID, newpacket.PacketID))
        p2p_service.SendFail(newpacket, 'not a customer')
        return False
    glob_path = global_id.ParseGlobalID(newpacket.PacketID)
    if not glob_path['path']:
        # backward compatible check
        glob_path = global_id.ParseGlobalID(
            my_id.getGlobalID('master') + ':' + newpacket.PacketID)
    if not glob_path['path']:
        lg.err("got incorrect PacketID")
        p2p_service.SendFail(newpacket, 'incorrect path')
        return False
    # TODO: process files from another customer : glob_path['idurl']
    filename = self._do_make_valid_filename(newpacket.OwnerID, glob_path)
    if not filename:
        lg.warn("got empty filename, bad customer or wrong packetID?")
        p2p_service.SendFail(newpacket, 'empty filename')
        return False
    dirname = os.path.dirname(filename)
    if not os.path.exists(dirname):
        try:
            bpio._dirs_make(dirname)
        except:
            lg.err("can not create sub dir %s" % dirname)
            p2p_service.SendFail(newpacket, 'write error')
            return False
    data = newpacket.Serialize()
    donated_bytes = settings.getDonatedBytes()
    if not os.path.isfile(settings.CustomersSpaceFile()):
        # first customer write: create the donated space bookkeeping file
        bpio._write_dict(settings.CustomersSpaceFile(), {
            'free': donated_bytes,
        })
        lg.warn('created a new space file: %s' % settings.CustomersSpaceFile())
    space_dict = bpio._read_dict(settings.CustomersSpaceFile())
    if newpacket.OwnerID not in space_dict.keys():
        lg.err("no info about donated space for %s" % newpacket.OwnerID)
        p2p_service.SendFail(newpacket, 'no info about donated space')
        return False
    used_space_dict = bpio._read_dict(settings.CustomersUsedSpaceFile(), {})
    if newpacket.OwnerID in used_space_dict.keys():
        try:
            bytes_used_by_customer = int(
                used_space_dict[newpacket.OwnerID])
            bytes_donated_to_customer = int(space_dict[newpacket.OwnerID])
            if bytes_donated_to_customer - bytes_used_by_customer < len(
                    data):
                lg.warn("no free space for %s" % newpacket.OwnerID)
                p2p_service.SendFail(newpacket, 'no free space')
                return False
        except:
            # broken bookkeeping records - proceed with the write anyway
            lg.exc()
    if not bpio.WriteFile(filename, data):
        lg.err("can not write to %s" % str(filename))
        p2p_service.SendFail(newpacket, 'write error')
        return False
    # Here Data() packet was stored as it is on supplier node (current machine)
    sz = len(data)
    del data
    lg.out(
        self.debug_level,
        "service_supplier._on_data %r saved from [%s | %s] to %s with %d bytes" % (
            newpacket,
            newpacket.OwnerID,
            newpacket.CreatorID,
            filename,
            sz,
        ))
    p2p_service.SendAck(newpacket, str(len(newpacket.Payload)))
    from supplier import local_tester
    # re-check donated space consumption after the write
    reactor.callLater(0, local_tester.TestSpaceTime)
    # temporary disabled
    # from main import events
    # events.send('supplier-file-modified', data=dict(
    #     action='write',
    #     glob_path=glob_path['path'],
    #     owner_id=newpacket.OwnerID,
    # ))
    return True
def on_data(newpacket):
    """
    Store an incoming Data() packet on this supplier node.

    The sender must pass packet ownership verification; the customer's
    donated-space quota is checked before the serialized packet is written
    to local donated storage. Replies Ack on success (or Fail for write and
    quota errors after verification passed). Returns True only when stored.
    """
    if id_url.to_bin(newpacket.OwnerID) == my_id.getIDURL().to_bin():
        # this Data belong to us, SKIP
        return False
    # if not contactsdb.is_customer(newpacket.OwnerID):
    #     # SECURITY
    #     # TODO: process files from another customer : glob_path['idurl']
    #     lg.warn("skip, %s not a customer, packetID=%s" % (newpacket.OwnerID, newpacket.PacketID))
    #     # p2p_service.SendFail(newpacket, 'not a customer')
    #     return False
    glob_path = global_id.ParseGlobalID(newpacket.PacketID)
    if not glob_path['path']:
        # backward compatible check
        glob_path = global_id.ParseGlobalID(
            my_id.getGlobalID('master') + ':' + newpacket.PacketID)
    if not glob_path['path']:
        lg.err("got incorrect PacketID")
        # p2p_service.SendFail(newpacket, 'incorrect path')
        return False
    # SECURITY: only the verified owner (or authorized party) may store data
    authorized_idurl = verify_packet_ownership(newpacket)
    if authorized_idurl is None:
        lg.err("ownership verification failed for %r" % newpacket)
        # p2p_service.SendFail(newpacket, 'ownership verification failed')
        return False
    filename = make_valid_filename(newpacket.OwnerID, glob_path)
    if not filename:
        lg.warn("got empty filename, bad customer or wrong packetID?")
        # p2p_service.SendFail(newpacket, 'empty filename')
        return False
    dirname = os.path.dirname(filename)
    if not os.path.exists(dirname):
        try:
            bpio._dirs_make(dirname)
        except:
            lg.err("can not create sub dir %s" % dirname)
            p2p_service.SendFail(newpacket, 'write error', remote_idurl=authorized_idurl)
            return False
    data = newpacket.Serialize()
    donated_bytes = settings.getDonatedBytes()
    # make sure quota records exist before reading them
    accounting.check_create_customers_quotas(donated_bytes)
    space_dict, _ = accounting.read_customers_quotas()
    if newpacket.OwnerID.to_bin() not in list(space_dict.keys()):
        lg.err("customer space is broken, no info about donated space for %s" % newpacket.OwnerID)
        p2p_service.SendFail(
            newpacket,
            'customer space is broken, no info about donated space',
            remote_idurl=authorized_idurl)
        return False
    used_space_dict = accounting.read_customers_usage()
    if newpacket.OwnerID.to_bin() in list(used_space_dict.keys()):
        try:
            bytes_used_by_customer = int(
                used_space_dict[newpacket.OwnerID.to_bin()])
            bytes_donated_to_customer = int(
                space_dict[newpacket.OwnerID.to_bin()])
            if bytes_donated_to_customer - bytes_used_by_customer < len(data):
                lg.warn("no free space left for customer data: %s" % newpacket.OwnerID)
                p2p_service.SendFail(newpacket, 'no free space left for customer data', remote_idurl=authorized_idurl)
                return False
        except:
            # broken usage/quota records - proceed with the write anyway
            lg.exc()
    if not bpio.WriteBinaryFile(filename, data):
        lg.err("can not write to %s" % str(filename))
        p2p_service.SendFail(newpacket, 'write error', remote_idurl=authorized_idurl)
        return False
    # Here Data() packet was stored as it is on supplier node (current machine)
    del data
    p2p_service.SendAck(newpacket, response=strng.to_text(len(newpacket.Payload)), remote_idurl=authorized_idurl)
    # re-check donated space consumption after the write
    reactor.callLater(0, local_tester.TestSpaceTime)  # @UndefinedVariable
    # if self.publish_event_supplier_file_modified:  # TODO: must remove that actually
    #     from main import events
    #     events.send('supplier-file-modified', data=dict(
    #         action='write',
    #         glob_path=glob_path['path'],
    #         owner_id=newpacket.OwnerID,
    #     ))
    return True
def Data(request):
    """
    Handle an incoming Data() packet.

    Two cases: 1) the packet is our own requested data coming back (the
    backup index from a supplier) - pass it to backup_control; 2) the packet
    is a customer's file to be stored on our donated space - check the quota
    bookkeeping files and write it to disk, replying Ack or Fail.
    """
    # 1. this is our Data!
    if request.OwnerID == my_id.getLocalID():
        if _Debug:
            lg.out(_DebugLevel, "p2p_service.Data %r for us from %s" % (request, nameurl.GetName(request.RemoteID)))
        if driver.is_started("service_backups"):
            if request.PacketID in [settings.BackupIndexFileName()]:
                from storage import backup_control
                backup_control.IncomingSupplierBackupIndex(request)
                return True
        return False
    # 2. this Data is not belong to us
    if not driver.is_started("service_supplier"):
        return SendFail(request, "supplier service is off")
    if not contactsdb.is_customer(request.OwnerID):
        # SECURITY
        lg.warn("%s not a customer, packetID=%s" % (request.OwnerID, request.PacketID))
        SendFail(request, "not a customer")
        return
    filename = makeFilename(request.OwnerID, request.PacketID)
    if filename == "":
        lg.warn("got empty filename, bad customer or wrong packetID? ")
        SendFail(request, "empty filename")
        return
    dirname = os.path.dirname(filename)
    if not os.path.exists(dirname):
        try:
            bpio._dirs_make(dirname)
        except:
            lg.warn("ERROR can not create sub dir " + dirname)
            SendFail(request, "write error")
            return
    data = request.Serialize()
    donated_bytes = settings.getDonatedBytes()
    if not os.path.isfile(settings.CustomersSpaceFile()):
        # first customer write: create the donated space bookkeeping file
        bpio._write_dict(settings.CustomersSpaceFile(), {"free": donated_bytes})
        if _Debug:
            lg.out(_DebugLevel, "p2p_service.Data created a new space file")
    space_dict = bpio._read_dict(settings.CustomersSpaceFile())
    if request.OwnerID not in space_dict.keys():
        lg.warn("no info about donated space for %s" % request.OwnerID)
        SendFail(request, "no info about donated space")
        return
    used_space_dict = bpio._read_dict(settings.CustomersUsedSpaceFile(), {})
    if request.OwnerID in used_space_dict.keys():
        try:
            bytes_used_by_customer = int(used_space_dict[request.OwnerID])
            bytes_donated_to_customer = int(space_dict[request.OwnerID])
            if bytes_donated_to_customer - bytes_used_by_customer < len(data):
                lg.warn("no free space for %s" % request.OwnerID)
                SendFail(request, "no free space")
                return
        except:
            # broken bookkeeping records - proceed with the write anyway
            lg.exc()
    if not bpio.WriteFile(filename, data):
        lg.warn("ERROR can not write to " + str(filename))
        SendFail(request, "write error")
        return
    SendAck(request, str(len(request.Payload)))
    from supplier import local_tester
    # re-check donated space consumption after the write
    reactor.callLater(0, local_tester.TestSpaceTime)
    del data
    if _Debug:
        lg.out(
            _DebugLevel,
            "p2p_service.Data saved from [%s/%s] to %s" % (nameurl.GetName(request.OwnerID), nameurl.GetName(request.CreatorID), filename),
        )