def on_tray_icon_command(cmd):
    """
    Dispatch a command received from the system tray icon.

    Known commands: 'exit', 'restart', 'reconnect', 'show', 'hide', 'toolbar'.
    Anything else is logged as a warning.
    """
    from main import shutdowner
    from services import driver
    from p2p import network_connector
    lg.out(2, 'on_tray_icon_command %s' % cmd)
    if cmd == 'exit':
        shutdowner.A('stop', 'exit')
        return
    if cmd == 'restart':
        # if the GUI process is currently running, restart and bring it back up
        if bpio.find_process(['bpgui.', ]):
            shutdowner.A('stop', 'restartnshow')
        else:
            shutdowner.A('stop', 'restart')
        return
    if cmd == 'reconnect':
        # only meaningful while the network service is up
        if driver.is_started('service_network'):
            network_connector.A('reconnect')
        return
    if cmd == 'show':
        show()
        return
    if cmd in ('hide', 'toolbar'):
        # currently no-ops: GUI commands are disabled
        return
    lg.warn('wrong command: ' + str(cmd))
def ListFiles(request):
    """
    We will want to use this to see what needs to be resent, and expect normal
    case is very few missing.

    This is to build the ``Files()`` we are holding for a customer.

    Replies to the sender with a signed ``Files()`` packet; returns the
    outgoing packet (or a ``SendFail`` result when the supplier service is off).
    """
    if not driver.is_started("service_supplier"):
        return SendFail(request, "supplier service is off")
    MyID = my_id.getLocalID()
    RemoteID = request.OwnerID
    PacketID = request.PacketID
    Payload = request.Payload
    if _Debug:
        lg.out(
            _DebugLevel,
            "p2p_service.ListFiles from [%s], format is %s" % (nameurl.GetName(request.OwnerID), Payload)
        )
    custdir = settings.getCustomersFilesDir()
    # each customer gets a sub-directory named after its IDURL
    ownerdir = os.path.join(custdir, nameurl.UrlFilename(request.OwnerID))
    if not os.path.isdir(ownerdir):
        if _Debug:
            lg.out(_DebugLevel, "p2p_service.ListFiles did not find customer dir " + ownerdir)
        # no files stored for this customer yet: reply with an empty listing
        src = PackListFiles("", Payload)
        result = signed.Packet(commands.Files(), MyID, MyID, PacketID, src, RemoteID)
        gateway.outbox(result)
        return result
    plaintext = TreeSummary(ownerdir)
    if _Debug:
        lg.out(_DebugLevel + 4, "\n%s" % (plaintext))
    src = PackListFiles(plaintext, Payload)
    result = signed.Packet(commands.Files(), MyID, MyID, PacketID, src, RemoteID)
    gateway.outbox(result)
    return result
def _ping(self):
    """Send a wide propagate packet to every customer currently seen offline."""
    from services import driver
    if not driver.is_started('service_identity_propagate'):
        return
    from p2p import contact_status
    from p2p import propagate
    for idurl in contact_status.listOfflineCustomers():
        propagate.SendToID(idurl, wide=True)
def _on_my_suppliers_all_hired(self, evt):
    """Bring up service_data_motion once the full set of suppliers is hired."""
    from logs import lg
    from services import driver
    # short-circuit keeps is_started() from being called when service disabled
    if driver.is_enabled('service_data_motion') and not driver.is_started('service_data_motion'):
        lg.info('all my suppliers are hired, starting service_data_motion()')
        driver.start_single('service_data_motion')
def _on_my_suppliers_yet_not_hired(self, evt):
    """Stop service_list_files while the supplier set is not fully hired."""
    from logs import lg
    from services import driver
    # short-circuit keeps is_started() from being called when service disabled
    if driver.is_enabled('service_list_files') and driver.is_started('service_list_files'):
        lg.info('my suppliers failed to hire, stopping service_list_files()')
        driver.stop_single('service_list_files')
def _list_active_connections(params):
    """
    Build a JSON-friendly listing of active transport sessions.

    ``params['protos']`` optionally restricts which transports to inspect;
    otherwise all active transports are listed.  Returns ``{'result': [...]}``
    where each item describes one connection.
    """
    result = []
    if not driver.is_started('service_gateway'):
        return {'result': result, }
    from transport import gateway
    result = []
    wanted_protos = params.get('protos', [])
    if not wanted_protos:
        wanted_protos = gateway.list_active_transports()
    for proto in wanted_protos:
        for connection in gateway.list_active_sessions(proto):
            # defaults, overridden below depending on connection type/state
            item = {
                'status': 'unknown',
                'state': 'unknown',
                'proto': proto,
                'host': 'unknown',
                'idurl': 'unknown',
                'bytes_sent': 0,
                'bytes_received': 0,
            }
            if proto == 'tcp':
                # a 'stream' attribute marks an established TCP session
                if hasattr(connection, 'stream'):
                    try:
                        host = '%s:%s' % (connection.peer_address[0], connection.peer_address[1])
                    except:
                        host = 'unknown'
                    item.update({
                        'status': 'active',
                        'state': connection.state,
                        'host': host,
                        'idurl': connection.peer_idurl or '',
                        'bytes_sent': connection.total_bytes_sent,
                        'bytes_received': connection.total_bytes_received,
                    })
                else:
                    # still connecting: only the target address is known
                    try:
                        host = '%s:%s' % (connection.connection_address[0], connection.connection_address[1])
                    except:
                        host = 'unknown'
                    item.update({
                        'status': 'connecting',
                        'host': host,
                    })
            elif proto == 'udp':
                try:
                    host = '%s:%s' % (connection.peer_address[0], connection.peer_address[1])
                except:
                    host = 'unknown'
                item.update({
                    'status': 'active',
                    'state': connection.state,
                    'host': host,
                    'idurl': connection.peer_idurl or '',
                    'bytes_sent': connection.bytes_sent,
                    'bytes_received': connection.bytes_received,
                })
            result.append(item)
    return {'result': result, }
def _on_my_suppliers_all_hired(self, evt):
    """Start service_list_files and ask the orator for files once all suppliers are hired."""
    from logs import lg
    from services import driver
    if not driver.is_enabled('service_list_files'):
        return
    if driver.is_started('service_list_files'):
        return
    lg.info('all my suppliers are hired, starting service_list_files()')
    driver.start_single('service_list_files')
    from customer import list_files_orator
    list_files_orator.A('need-files')
def _on_tray_icon_command(self, cmd):
    """
    Handle a command coming from the system tray icon menu.

    Known commands: "exit", "restart", "reconnect", "show", "sync",
    "hide", "toolbar".  Unknown commands are logged as warnings.
    Any exception is caught and logged so a broken handler cannot
    break the tray icon event loop.
    """
    lg.out(2, "initializer._on_tray_icon_command : [%s]" % cmd)
    try:
        if cmd == "exit":
            shutdowner.A("stop", "exit")
        elif cmd == "restart":
            # appList = bpio.find_process(['bpgui.',])
            # if len(appList) > 0:
            #     shutdowner.A('stop', 'restartnshow')  # ('restart', 'show'))
            # else:
            #     shutdowner.A('stop', 'restart')  # ('restart', ''))
            shutdowner.A("stop", "restart")
        elif cmd == "reconnect":
            from p2p import network_connector
            if driver.is_started("service_network"):
                network_connector.A("reconnect")
        elif cmd == "show":
            from web import control
            control.show()
        elif cmd == "sync":
            # run software update via git; icon reflects progress/outcome
            try:
                from updates import git_proc
                from system import tray_icon

                def _sync_callback(result):
                    # flash a status icon for 5 seconds, then restore
                    if result == "error":
                        tray_icon.draw_icon("error")
                        reactor.callLater(5, tray_icon.restore_icon)
                        return
                    elif result == "new-data":
                        tray_icon.draw_icon("updated")
                        reactor.callLater(5, tray_icon.restore_icon)
                        return
                    tray_icon.restore_icon()
                tray_icon.draw_icon("sync")
                git_proc.sync(_sync_callback)
            except:
                lg.exc()
        elif cmd == "hide":
            pass
        elif cmd == "toolbar":
            pass
        else:
            lg.warn("wrong command: " + str(cmd))
    except:
        lg.exc()
def CancelService(request, info):
    """
    Handle a CancelService() request: the sender wants us to stop providing
    the named service to him.

    The payload's first word names the service.  Replies with ``SendFail``
    on a bad payload or unknown/stopped service, otherwise delegates to
    ``driver.cancel()``.
    """
    if _Debug:
        lg.out(_DebugLevel, "p2p_service.CancelService")
    words = request.Payload.split(" ")
    # BUGFIX: str.split() always returns at least one element, so the old
    # `len(words) < 1` guard could never fire and an empty payload fell
    # through with service_name == "" — also reject a blank service name
    if len(words) < 1 or not words[0].strip():
        lg.warn("got wrong payload in %s" % request)
        return SendFail(request, "wrong payload")
    service_name = words[0]
    # TODO: - temporary keep that for backward compatibility
    if service_name == "storage":
        if not driver.is_started("service_supplier"):
            return SendFail(request, "supplier service is off")
        return driver.cancel("service_supplier", request, info)
    if not driver.is_exist(service_name):
        lg.warn("got wrong payload in %s" % request)
        return SendFail(request, "service %s not exist" % service_name)
    if not driver.is_started(service_name):
        return SendFail(request, "service %s is off" % service_name)
    return driver.cancel(service_name, request, info)
def _list_active_streams(params):
    """
    Build a JSON-friendly listing of active file transfer streams.

    ``params['protos']`` optionally restricts which transports to inspect;
    otherwise all active transports are listed.  Returns ``{'result': [...]}``
    where each item describes one in- or out-going stream with its progress.
    """
    result = []
    if not driver.is_started('service_gateway'):
        return {'result': result, }
    from transport import gateway
    result = []
    wanted_protos = params.get('protos', [])
    if not wanted_protos:
        wanted_protos = gateway.list_active_transports()
    for proto in wanted_protos:
        for stream in gateway.list_active_streams(proto):
            # defaults used when the stream direction cannot be determined
            item = {
                'proto': proto,
                'id': '',
                'type': '',
                'bytes_current': -1,
                'bytes_total': -1,
                'progress': '0%',
            }
            if proto == 'tcp':
                # direction is detected by which counter attribute exists
                if hasattr(stream, 'bytes_received'):
                    item.update({
                        'id': stream.file_id,
                        'type': 'in',
                        'bytes_current': stream.bytes_received,
                        'bytes_total': stream.size,
                        'progress': misc.value2percent(stream.bytes_received, stream.size, 0)
                    })
                elif hasattr(stream, 'bytes_sent'):
                    item.update({
                        'id': stream.file_id,
                        'type': 'out',
                        'bytes_current': stream.bytes_sent,
                        'bytes_total': stream.size,
                        'progress': misc.value2percent(stream.bytes_sent, stream.size, 0)
                    })
            elif proto == 'udp':
                # udp streams wrap the actual transfer in a 'consumer' object
                if hasattr(stream.consumer, 'bytes_received'):
                    item.update({
                        'id': stream.stream_id,
                        'type': 'in',
                        'bytes_current': stream.consumer.bytes_received,
                        'bytes_total': stream.consumer.size,
                        'progress': misc.value2percent(stream.consumer.bytes_received, stream.consumer.size, 0)
                    })
                elif hasattr(stream.consumer, 'bytes_sent'):
                    item.update({
                        'id': stream.stream_id,
                        'type': 'out',
                        'bytes_current': stream.consumer.bytes_sent,
                        'bytes_total': stream.consumer.size,
                        'progress': misc.value2percent(stream.consumer.bytes_sent, stream.consumer.size, 0)
                    })
            result.append(item)
    return {'result': result, }
def _on_my_storage_ready(self, evt):
    """Resolve the pending start deferred and bring up service_my_data."""
    from logs import lg
    from services import driver
    if self.starting_deferred and not self.starting_deferred.called:
        self.starting_deferred.callback(True)
        self.starting_deferred = None
    if driver.is_enabled('service_my_data') and not driver.is_started('service_my_data'):
        lg.info('my storage is ready, starting service_my_data()')
        driver.start_single('service_my_data')
def _on_tray_icon_command(self, cmd):
    """
    Handle a command coming from the system tray icon menu.

    Known commands: 'exit', 'restart', 'reconnect', 'show', 'sync',
    'hide', 'toolbar'.  Unknown commands are logged as warnings.
    Any exception is caught and logged so a broken handler cannot
    break the tray icon event loop.
    """
    lg.out(2, "initializer._on_tray_icon_command : [%s]" % cmd)
    try:
        if cmd == 'exit':
            shutdowner.A('stop', 'exit')
        elif cmd == 'restart':
            # appList = bpio.find_process(['bpgui.',])
            # if len(appList) > 0:
            #     shutdowner.A('stop', 'restartnshow')  # ('restart', 'show'))
            # else:
            #     shutdowner.A('stop', 'restart')  # ('restart', ''))
            shutdowner.A('stop', 'restart')
        elif cmd == 'reconnect':
            from p2p import network_connector
            if driver.is_started('service_network'):
                network_connector.A('reconnect')
        elif cmd == 'show':
            from web import control
            control.show()
        elif cmd == 'sync':
            # run software update via git; icon reflects progress/outcome
            try:
                from updates import git_proc
                from system import tray_icon

                def _sync_callback(result):
                    # flash a status icon for 5 seconds, then restore
                    if result == 'error':
                        tray_icon.draw_icon('error')
                        reactor.callLater(5, tray_icon.restore_icon)
                        return
                    elif result == 'new-data':
                        tray_icon.draw_icon('updated')
                        reactor.callLater(5, tray_icon.restore_icon)
                        return
                    tray_icon.restore_icon()
                tray_icon.draw_icon('sync')
                git_proc.sync(_sync_callback)
            except:
                lg.exc()
        elif cmd == 'hide':
            pass
        elif cmd == 'toolbar':
            pass
        else:
            lg.warn('wrong command: ' + str(cmd))
    except:
        lg.exc()
def process(json_request):
    """
    Entry point of the file manager API: dispatch a JSON request to the
    matching ``_<mode>`` handler and return its result dictionary.

    ``json_request`` may be a JSON string or an already-parsed dict and must
    contain ``params.mode``.  Any exception is converted into a
    ``{"result": {"success": False, "error": ...}}`` payload.
    """
    lg.out(20, 'filemanager_api.process %s' % json_request)
    if not driver.is_started('service_backups'):
        return {'result': {
            "success": False,
            "error": "network [service_backups] is not started: %s" % (
                driver.services().get('service_backups', '!!! not found !!!'))}}
    mode = ''
    result = {}
    try:
        # accept both raw JSON text and pre-parsed structures
        # (`unicode` check: this is Python 2 code)
        if isinstance(json_request, str) or isinstance(json_request, unicode):
            import json
            json_request = json.loads(json_request)
        mode = json_request['params']['mode']
        if mode == 'config':
            result = _config(json_request['params'])
        elif mode == 'stats':
            result = _stats(json_request['params'])
        elif mode == 'list':
            result = _list(json_request['params'])
        elif mode == 'listlocal':
            result = _list_local(json_request['params'])
        elif mode == 'listall':
            result = _list_all(json_request['params'])
        elif mode == 'upload':
            result = _upload(json_request['params'])
        elif mode == 'delete':
            result = _delete(json_request['params'])
        elif mode == 'deleteversion':
            result = _delete_version(json_request['params'])
        elif mode == 'download':
            result = _download(json_request['params'])
        elif mode == 'tasks':
            result = _list_active_tasks(json_request['params'])
        elif mode == 'packets':
            result = _list_in_out_packets(json_request['params'])
        elif mode == 'connections':
            result = _list_active_connections(json_request['params'])
        elif mode == 'streams':
            result = _list_active_streams(json_request['params'])
        elif mode == 'debuginfo':
            result = _debuginfo(json_request['params'])
        else:
            result = {"result": {"success": False, "error": 'filemanager method %s not found' % mode}}
    except Exception as exc:
        lg.exc()
        # report the exception class and message back to the caller
        descr = str(sys.exc_info()[0].__name__) + ': ' + str(sys.exc_info()[1])
        result = {"result": {"success": False, "error": descr}}
        # lg.out(4, ' ERROR unknown mode: %s' % mode)
    lg.out(20, ' %s' % pprint.pformat(result))
    return result
def _on_my_storage_not_ready_yet(self, evt):
    """Fail the pending start deferred and make sure service_my_data is down."""
    from logs import lg
    from services import driver
    if self.starting_deferred and not self.starting_deferred.called:
        self.starting_deferred.errback(Exception('my storage is not ready yet'))
        self.starting_deferred = None
    if driver.is_enabled('service_my_data') and not driver.is_started('service_my_data'):
        lg.info('my storage is not ready yet, stopping service_my_data()')
        driver.stop_single('service_my_data')
def Save(filepath=None):
    """
    Save index data base to local file ( call ``WriteIndex()`` ) and notify
    "index_synchronizer()" state machine.

    Skipped (returns False) while the index is being loaded.
    """
    global _LoadingFlag
    if _LoadingFlag:
        return False
    commit()
    WriteIndex(filepath)
    if not driver.is_started('service_backup_db'):
        return
    from storage import index_synchronizer
    index_synchronizer.A('push')
def init(self):
    """
    Method to initialize additional variables and flags at creation of the
    state machine.
    """
    self.listen_port = None
    self.my_id = None
    self.my_address = None
    self.options = {}
    self.notified = False
    self.IncomingPosition = -1
    # pick up the already-detected external address when stun service is up
    if driver.is_started('service_my_ip_port'):
        self.my_address = stun_client.A().getMyExternalAddress()
def Retrieve(request): """ Customer is asking us for data he previously stored with us. We send with ``outboxNoAck()`` method because he will ask again if he does not get it """ # TODO: rename to RetreiveData() if not driver.is_started("service_supplier"): return SendFail(request, "supplier service is off") if not contactsdb.is_customer(request.OwnerID): lg.warn("had unknown customer " + request.OwnerID) SendFail(request, "not a customer") return filename = makeFilename(request.OwnerID, request.PacketID) if filename == "": lg.warn("had empty filename") SendFail(request, "empty filename") return if not os.path.exists(filename): lg.warn("did not find requested file locally " + filename) SendFail(request, "did not find requested file locally") return if not os.access(filename, os.R_OK): lg.warn("no read access to requested packet " + filename) SendFail(request, "no read access to requested packet") return data = bpio.ReadBinaryFile(filename) if not data: lg.warn("empty data on disk " + filename) SendFail(request, "empty data on disk") return outpacket = signed.Unserialize(data) del data if outpacket is None: lg.warn("Unserialize fails, not Valid packet " + filename) SendFail(request, "unserialize fails") return if not outpacket.Valid(): lg.warn("unserialized packet is not Valid " + filename) SendFail(request, "unserialized packet is not Valid") return if _Debug: lg.out( _DebugLevel, "p2p_service.Retrieve sending %r back to %s" % (outpacket, nameurl.GetName(outpacket.CreatorID)), ) gateway.outbox(outpacket, target=outpacket.CreatorID)
def _on_my_storage_ready(self, evt):
    """
    Resolve the pending start deferred, populate private files for listeners
    if that was requested earlier, and bring up service_my_data.
    """
    from logs import lg
    from main import listeners
    from services import driver
    from storage import backup_fs
    if self.starting_deferred and not self.starting_deferred.called:
        self.starting_deferred.callback(True)
        self.starting_deferred = None
    # deferred listener population requested before storage became ready
    if listeners.is_populate_requered('private_file'):
        listeners.populate_later().remove('private_file')
        backup_fs.populate_private_files()
    if driver.is_enabled('service_my_data') and not driver.is_started('service_my_data'):
        lg.info('my storage is ready, starting service_my_data()')
        driver.start_single('service_my_data')
def RequestService(request, info):
    """
    Handle a RequestService() request: the sender asks us to start providing
    the named service to him.

    The payload's first word names the service.  Replies with ``SendFail``
    on a bad payload or unknown/stopped service, otherwise delegates to
    ``driver.request()``.
    """
    if len(request.Payload) > 1024 * 10:
        return SendFail(request, "too long payload")
    words = request.Payload.split(" ")
    # BUGFIX: str.split() always returns at least one element, so the old
    # `len(words) < 1` guard could never fire and an empty payload fell
    # through with service_name == "" — also reject a blank service name
    if len(words) < 1 or not words[0].strip():
        lg.warn("got wrong payload in %s" % request)
        return SendFail(request, "wrong payload")
    service_name = words[0]
    if _Debug:
        lg.out(_DebugLevel, "p2p_service.RequestService %s : %s" % (request.OwnerID, service_name))
    if not driver.is_exist(service_name):
        lg.warn("got wrong payload in %s" % service_name)
        return SendFail(request, "service %s not exist" % service_name)
    if not driver.is_started(service_name):
        return SendFail(request, "service %s is off" % service_name)
    return driver.request(service_name, request, info)
def process(newpacket, info):
    """
    Central processing point for a just-received packet: validate it, feed it
    to any matching outgoing-packet automata, then run the registered inbox
    callbacks; unhandled packets are logged (or recorded in history when
    debugging).
    """
    if not driver.is_started("service_p2p_hookups"):
        if _Debug:
            lg.out(_DebugLevel, "packet_in.process SKIP incoming packet, service_p2p_hookups is not started")
        return
    handled = False
    if _Debug:
        lg.out(
            _DebugLevel,
            "packet_in.process %s from %s://%s : %s" % (str(newpacket), info.proto, info.host, info.status)
        )
    from p2p import commands
    from p2p import p2p_service
    if newpacket.Command == commands.Identity() and newpacket.RemoteID == my_id.getLocalID():
        # contact sending us current identity we might not have
        # so we handle it before check that packet is valid
        # because we might not have his identity on hands and so can not verify the packet
        # so we check that his Identity is valid and save it into cache
        # than we check the packet to be valid too.
        if not p2p_service.Identity(newpacket):
            return
    # check that signed by a contact of ours
    if not newpacket.Valid():
        lg.warn("new packet from %s://%s is NOT VALID: %r" % (info.proto, info.host, newpacket))
        return
    # let any outgoing packet waiting for this response consume it
    for p in packet_out.search_by_response_packet(newpacket, info.proto, info.host):
        p.automat("inbox-packet", (newpacket, info))
        handled = True
    handled = callback.run_inbox_callbacks(newpacket, info, info.status, info.error_message) or handled
    if not handled and newpacket.Command not in [commands.Ack(), commands.Fail()]:
        if _Debug:
            lg.out(_DebugLevel - 8, " incoming %s from [%s://%s]" % (newpacket, info.proto, info.host))
            lg.out(_DebugLevel - 8, " NOT HANDLED !!!")
    else:
        if _Debug:
            # keep a trace of handled packets for debugging/inspection
            history().append(
                {
                    "time": newpacket.Date,
                    "command": newpacket.Command,
                    "packet_id": newpacket.PacketID,
                    "creator_id": newpacket.CreatorID,
                    "owner_id": newpacket.OwnerID,
                    "remote_id": newpacket.RemoteID,
                    "payload": len(newpacket.Payload),
                    "address": "%s://%s" % (info.proto, info.host),
                }
            )
def safe_stun(udp_port=None, dht_port=None, result_defer=None):
    """
    Run a STUN detection of our external IP, waiting for the DHT layer to
    be joined first if needed.

    Returns a Deferred that fires with a dict like
    ``{'result': 'stun-success'|'stun-failed', 'type': ..., 'ip': ..., 'details': ...}``
    or ``{'ip': '127.0.0.1', 'errors': [...]}`` on failure.
    """
    from twisted.internet.defer import Deferred
    result = result_defer or Deferred()
    if driver.is_started('service_entangled_dht'):
        # DHT node still joining the network: retry once the join completes
        if dht_service.node()._joinDeferred and not dht_service.node()._joinDeferred.called:
            dht_service.node()._joinDeferred.addCallback(
                lambda ok: safe_stun(udp_port=udp_port, dht_port=dht_port, result_defer=result))
            dht_service.node()._joinDeferred.addErrback(result.errback)
            return result
    if not driver.is_on('service_entangled_dht'):
        result.errback(Exception('service_entangled_dht() is not started'))
        return result
    try:
        settings.init()
        dht_port = dht_port or settings.getDHTPort()
        udp_port = udp_port or settings.getUDPPort()
        if dht_port:
            dht_service.init(dht_port)
        d = dht_service.connect()
        if udp_port:
            udp.listen(udp_port)

        def _cb(cod, typ, ip, details):
            # A('shutdown')
            result.callback({
                'result': cod,  # 'stun-success' or 'stun-failed'
                'type': typ,
                'ip': ip,
                'details': details,
            })

        def _go(live_nodes):
            # DHT connected: start the stun client state machine
            A('init', udp_port)
            A('start', _cb)

        d.addCallback(_go)
        d.addErrback(lambda err: result.callback(dict(ip='127.0.0.1', errors=[str(err), ])))
    except Exception as exc:
        lg.exc()
        result.callback(dict(ip='127.0.0.1', errors=[str(exc), ]))
        return result
    return result
def DeleteFile(request):
    """
    Delete one ore multiple files or folders on my machine.

    Path IDs come either from the payload (newline-separated) or, when the
    payload is empty, from the request's PacketID.  Replies with ``SendAck``
    on success or ``SendFail`` when a path does not belong to a customer.
    """
    if not driver.is_started("service_supplier"):
        return SendFail(request, "supplier service is off")
    if request.Payload == "":
        ids = [request.PacketID]
    else:
        ids = request.Payload.split("\n")
    filescount = 0
    dirscount = 0
    for pathID in ids:
        filename = makeFilename(request.OwnerID, pathID)
        if filename == "":
            # fall back to building the path without validating the customer
            filename = constructFilename(request.OwnerID, pathID)
            if not os.path.exists(filename):
                lg.warn(
                    "had unknown customer: %s or pathID is not correct or not exist: %s"
                    % (nameurl.GetName(request.OwnerID), pathID)
                )
                return SendFail(request, "not a customer, or file not found")
        if os.path.isfile(filename):
            try:
                os.remove(filename)
                filescount += 1
            except:
                lg.exc()
        elif os.path.isdir(filename):
            try:
                bpio._dir_remove(filename)
                dirscount += 1
            except:
                lg.exc()
        else:
            lg.warn("path not found %s" % filename)
    if _Debug:
        lg.out(
            _DebugLevel,
            "p2p_service.DeleteFile from [%s] with %d IDs, %d files and %d folders were removed"
            % (nameurl.GetName(request.OwnerID), len(ids), filescount, dirscount),
        )
    SendAck(request)
def IncomingSupplierBackupIndex(newpacket):
    """
    Called by ``p2p.p2p_service`` when a remote copy of our local index data
    base ( in the "Data" packet ) is received from one of our suppliers.

    The index is also stored on suppliers to be able to restore it.
    """
    b = encrypted.Unserialize(newpacket.Payload)
    if b is None:
        lg.out(2, 'backup_control.IncomingSupplierBackupIndex ERROR reading data from %s' % newpacket.RemoteID)
        return
    # BUGFIX: pre-bind these so the except handler below cannot raise
    # NameError when decryption fails before they get assigned
    inpt = None
    padded_data = ''
    try:
        session_key = key.DecryptLocalPK(b.EncryptedSessionKey)
        padded_data = key.DecryptWithSessionKey(session_key, b.EncryptedData)
        inpt = cStringIO.StringIO(padded_data[:int(b.Length)])
        # first line of the index holds the revision number
        supplier_revision = inpt.readline().rstrip('\n')
        if supplier_revision:
            supplier_revision = int(supplier_revision)
        else:
            supplier_revision = -1
        inpt.seek(0)
    except:
        lg.out(2, 'backup_control.IncomingSupplierBackupIndex ERROR reading data from %s' % newpacket.RemoteID)
        lg.out(2, '\n' + padded_data)
        lg.exc()
        try:
            if inpt is not None:
                inpt.close()
        except:
            pass
        return
    if driver.is_started('service_backup_db'):
        from storage import index_synchronizer
        index_synchronizer.A('index-file-received', (newpacket, supplier_revision))
    # only replace our local index when the supplier's copy is newer
    if revision() < supplier_revision:
        ReadIndex(inpt)
        backup_fs.Scan()
        backup_fs.Calculate()
        WriteIndex()
        control.request_update()
        lg.out(2, 'backup_control.IncomingSupplierBackupIndex updated to revision %d from %s' % (
            revision(), newpacket.RemoteID))
    inpt.close()
def DeleteBackup(request):
    """
    Delete one or multiple backups on my machine.

    Backup IDs come either from the payload (newline-separated) or, when the
    payload is empty, from the request's PacketID.  Replies with ``SendAck``
    on success or ``SendFail`` when a backup does not belong to a customer.
    """
    if not driver.is_started("service_supplier"):
        return SendFail(request, "supplier service is off")
    if request.Payload == "":
        ids = [request.PacketID]
    else:
        ids = request.Payload.split("\n")
    count = 0
    for backupID in ids:
        filename = makeFilename(request.OwnerID, backupID)
        if filename == "":
            # fall back to building the path without validating the customer
            filename = constructFilename(request.OwnerID, backupID)
            if not os.path.exists(filename):
                lg.warn("had unknown customer " + request.OwnerID + " or backupID " + backupID)
                return SendFail(request, "not a customer, or file not found")
        if os.path.isdir(filename):
            try:
                bpio._dir_remove(filename)
                count += 1
            except:
                lg.exc()
        elif os.path.isfile(filename):
            try:
                os.remove(filename)
                count += 1
            except:
                lg.exc()
        else:
            lg.warn("path not found %s" % filename)
    SendAck(request)
    if _Debug:
        lg.out(
            _DebugLevel,
            "p2p_service.DeleteBackup from [%s] with %d IDs, %d were removed"
            % (nameurl.GetName(request.OwnerID), len(ids), count),
        )
def doSuppliersRequestIndexFile(self, arg):
    """
    Action method.

    Sends a Retrieve() of the backup index file to every online supplier and
    remembers which suppliers were asked, so their responses can be matched
    later via the Data()/Fail() callbacks.
    """
    if _Debug:
        lg.out(_DebugLevel, 'index_synchronizer.doSuppliersRequestIndexFile')
    if driver.is_started('service_backups'):
        from storage import backup_control
        self.current_local_revision = backup_control.revision()
    else:
        # backups service is off: we have no local revision to compare with
        self.current_local_revision = -1
    self.latest_supplier_revision = -1
    self.requesting_suppliers.clear()
    self.requested_suppliers_number = 0
    packetID = settings.BackupIndexFileName()
    localID = my_id.getLocalID()
    for supplierId in contactsdb.suppliers():
        if not supplierId:
            continue
        # skip suppliers currently offline — they can not answer anyway
        if not contact_status.isOnline(supplierId):
            continue
        newpacket = signed.Packet(
            commands.Retrieve(), localID, localID, packetID, '', supplierId)
        pkt_out = gateway.outbox(newpacket, callbacks={
            commands.Data(): self._on_supplier_response,
            commands.Fail(): self._on_supplier_response,
        })
        if pkt_out:
            self.requesting_suppliers.add(supplierId)
            self.requested_suppliers_number += 1
        if _Debug:
            lg.out(_DebugLevel, ' %s sending to %s' % (pkt_out, nameurl.GetName(supplierId)))
def doNotifyFinished(self, arg):
    """Tell backup_monitor() that the fire/hire cycle has completed."""
    if not driver.is_started('service_backups'):
        return
    from storage import backup_monitor
    backup_monitor.A('fire-hire-finished')
def inbox(newpacket, info, status, error_message):
    """
    Dispatch an incoming packet to the handler matching its command.

    Returns True when the packet was fully handled here, False when it
    should also be processed elsewhere (Ack/Fail/Broadcast/Coin packets
    are deliberately left for other services).
    """
    if newpacket.CreatorID != my_id.getLocalID() and newpacket.RemoteID != my_id.getLocalID():
        # packet is NOT for us, skip
        return False
    commandhandled = False
    if newpacket.Command == commands.Ack():
        # a response from remote node, typically handled in other places
        Ack(newpacket, info)
        commandhandled = False
    elif newpacket.Command == commands.Fail():
        # some operation was failed on other side
        Fail(newpacket)
        commandhandled = False
    elif newpacket.Command == commands.Retrieve():
        # retrieve some packet customer stored with us
        Retrieve(newpacket)
        commandhandled = True
    elif newpacket.Command == commands.RequestService():
        # other node send us a request to get some service
        RequestService(newpacket, info)
        commandhandled = True
    elif newpacket.Command == commands.CancelService():
        # other node wants to stop the service we gave him
        CancelService(newpacket, info)
        commandhandled = True
    elif newpacket.Command == commands.Data():
        # new packet to store for customer
        commandhandled = Data(newpacket)
    elif newpacket.Command == commands.ListFiles():
        # customer wants list of their files
        ListFiles(newpacket)
        commandhandled = True
    elif newpacket.Command == commands.Files():
        # supplier sent us list of files
        Files(newpacket, info)
        commandhandled = True
    elif newpacket.Command == commands.DeleteFile():
        # will Delete a customer file for them
        DeleteFile(newpacket)
        commandhandled = True
    elif newpacket.Command == commands.DeleteBackup():
        # will Delete all files starting in a backup
        DeleteBackup(newpacket)
        commandhandled = True
    elif newpacket.Command == commands.RequestIdentity():
        # contact asking for our current identity
        RequestIdentity(newpacket)
        commandhandled = True
    elif newpacket.Command == commands.Message():
        # a private message, only when the messaging service is on
        if driver.is_started("service_private_messages"):
            from chat import message
            message.Message(newpacket)
            commandhandled = True
    elif newpacket.Command == commands.Correspondent():
        # contact wants to be our correspondent
        Correspondent(newpacket)
        commandhandled = True
    elif newpacket.Command == commands.Broadcast():
        # handled by service_broadcasting()
        Broadcast(newpacket, info)
        commandhandled = False
    elif newpacket.Command == commands.Coin():
        # handled by service_accountant()
        Coin(newpacket, info)
        commandhandled = False
    elif newpacket.Command == commands.RetreiveCoin():
        # handled by service_accountant()
        RetreiveCoin(newpacket, info)
        commandhandled = False
    return commandhandled
def ReadRawListFiles(supplierNum, listFileText):
    """
    Read ListFiles packet for given supplier and build a "remote" matrix. All
    lines are something like that::

      Findex 5456
      D0 -1
      D0/1 -1
      V0/1/F20090709034221PM 3 0-1000 7463434
      V0/1/F20090709034221PM 3 0-1000 7463434
      D0/0/123/4567 -1
      V0/0/123/4567/F20090709034221PM 3 0-11 434353 missing Data:1,3
      V0/0/123/4/F20090709012331PM 3 0-5 434353 missing Data:1,3 Parity:0,1,2

    First character can be::

      "F" for files
      "D" for folders
      "V" for backed up data

    Returns a tuple ``(backups2remove, paths2remove)`` of sets naming remote
    entries that are obsolete and can be deleted on the supplier side.
    """
    from storage import backup_control
    if driver.is_started("service_backup_db"):
        from storage import index_synchronizer
        is_in_sync = index_synchronizer.is_synchronized() and backup_control.revision() > 0
    else:
        is_in_sync = False
    backups2remove = set()
    paths2remove = set()
    oldfiles = ClearSupplierRemoteInfo(supplierNum)
    newfiles = 0
    # NOTE(review): index_synchronizer is only imported in the branch above —
    # when service_backup_db is off this log call would raise NameError; confirm
    lg.out(
        8,
        "backup_matrix.ReadRawListFiles %d bytes to read from supplier #%d, rev:%d, %s, is_in_sync=%s"
        % (len(listFileText), supplierNum, backup_control.revision(), index_synchronizer.A(), is_in_sync),
    )
    inpt = cStringIO.StringIO(listFileText)
    while True:
        line = inpt.readline()
        if line == "":
            break
        typ = line[0]
        line = line[1:]
        line = line.rstrip("\n")
        if line.strip() == "":
            continue
        # also don't consider the identity a backup,
        if line.find("http://") != -1 or line.find(".xml") != -1:
            continue
        lg.out(8, " %s:{%s}" % (typ, line))
        if typ == "F":
            # we don't have this path in the index
            # so we have several cases:
            # 1. this is old file and we need to remove it and all its backups
            # 2. we loose our local index and did not restore it from one of suppliers yet
            # 3. we did restore our account and did not restore the index yet
            # 4. we lost our index at all and we do not have nor local nor remote copy
            # what to do now:
            # - in first case we just need to remove the file from remote supplier
            # - in other cases we must keep all remote data and believe we can restore the index
            #   and get all file names and backed up data
            # how to recognize that? how to be sure we have the correct index?
            # because it should be empty right after we recover our account
            # or we may loose it if the local index file were lost
            # the first idea: check index_synchronizer() state - IN_SYNC means index is fine
            # the second idea: check revision number of the local index - 0 means we have no index yet
            try:
                pth, filesz = line.split(" ")
                filesz = int(filesz)
            except:
                pth = line
                filesz = -1
            if not backup_fs.IsFileID(pth):
                # remote supplier have some file - but we don't have it in the index
                if pth.strip("/") in [settings.BackupIndexFileName()]:
                    # this is the index file saved on remote supplier
                    # let's remember its size and put it in the backup_fs
                    item = backup_fs.FSItemInfo(pth.strip("/"), pth.strip("/"), backup_fs.FILE)
                    item.size = filesz
                    backup_fs.SetFile(item)
                else:
                    if is_in_sync:
                        # so we have some modifications in the index - it is not empty!
                        # index_synchronizer() did his job - so we have up to date index on hands
                        # now we are sure that this file is old and must be removed from remote site
                        paths2remove.add(pth)
                        lg.out(8, " F%s - remove, not found in the index" % pth)
                    # what to do now? let's hope we still can restore our index and this file is our remote data
        elif typ == "D":
            try:
                pth = line.split(" ")[0]
            except:
                pth = line
            if not backup_fs.ExistsID(pth):
                if is_in_sync:
                    paths2remove.add(pth)
                    lg.out(8, " D%s - remove, not found in the index" % pth)
        elif typ == "V":
            # minimum is 4 words: "0/0/F20090709034221PM", "3", "0-1000" "123456"
            words = line.split(" ")
            if len(words) < 4:
                lg.warn("incorrect line:[%s]" % line)
                continue
            try:
                pathID, versionName = packetid.SplitBackupID(words[0])
                backupID = pathID + "/" + versionName
                lineSupplierNum = int(words[1])
                minBlockNum, maxBlockNum = words[2].split("-")
                maxBlockNum = int(maxBlockNum)
            except:
                lg.warn("incorrect line:[%s]" % line)
                continue
            if lineSupplierNum != supplierNum:
                # this mean supplier have old files and we do not need those files
                backups2remove.add(backupID)
                lg.out(8, " V%s - remove, different supplier number" % backupID)
                continue
            iter_path = backup_fs.WalkByID(pathID)
            if iter_path is None:
                # this version is not found in the index
                if is_in_sync:
                    backups2remove.add(backupID)
                    paths2remove.add(pathID)
                    lg.out(8, " V%s - remove, path not found in the index" % pathID)
                continue
            item, localPath = iter_path
            if isinstance(item, dict):
                try:
                    item = item[backup_fs.INFO_KEY]
                except:
                    item = None
            if not item or not item.has_version(versionName):
                if is_in_sync:
                    backups2remove.add(backupID)
                    lg.out(8, " V%s - remove, version is not found in the index" % backupID)
                continue
            missingBlocksSet = {"Data": set(), "Parity": set()}
            if len(words) > 4:
                # "0/0/123/4567/F20090709034221PM/0-Data" "3" "0-5" "434353" "missing" "Data:1,3" "Parity:0,1,2"
                if words[4].strip() != "missing":
                    lg.warn("incorrect line:[%s]" % line)
                    continue
                for missingBlocksString in words[5:]:
                    try:
                        dp, blocks = missingBlocksString.split(":")
                        missingBlocksSet[dp] = set(blocks.split(","))
                    except:
                        lg.exc()
                        break
            if backupID not in remote_files():
                remote_files()[backupID] = {}
                # lg.out(6, 'backup_matrix.ReadRawListFiles new remote entry for %s created in the memory' % backupID)
            # +1 because range(2) give us [0,1] but we want [0,1,2]
            for blockNum in xrange(maxBlockNum + 1):
                if blockNum not in remote_files()[backupID]:
                    remote_files()[backupID][blockNum] = {
                        "D": [0] * contactsdb.num_suppliers(),
                        "P": [0] * contactsdb.num_suppliers(),
                    }
                for dataORparity in ["Data", "Parity"]:
                    # we set -1 if the file is missing and 1 if exist, so 0 mean "no info yet" ... smart!
                    bit = -1 if str(blockNum) in missingBlocksSet[dataORparity] else 1
                    remote_files()[backupID][blockNum][dataORparity[0]][supplierNum] = bit
                    newfiles += int((bit + 1) / 2)  # this should switch -1 or 1 to 0 or 1
            # save max block number for this backup
            if backupID not in remote_max_block_numbers():
                remote_max_block_numbers()[backupID] = -1
            if maxBlockNum > remote_max_block_numbers()[backupID]:
                remote_max_block_numbers()[backupID] = maxBlockNum
            # mark this backup to be repainted
            RepaintBackup(backupID)
    inpt.close()
    lg.out(
        8,
        " old:%d, new:%d, backups2remove:%d, paths2remove:%d"
        % (oldfiles, newfiles, len(backups2remove), len(paths2remove)),
    )
    # return list of backupID's which is too old but stored on suppliers machines
    return backups2remove, paths2remove
def state_changed(self, oldstate, newstate, event, arg):
    """Forward every state change of this automat into backup_monitor()."""
    # global_state.set_global_state('ORATOR ' + newstate)
    if not driver.is_started('service_backups'):
        return
    from storage import backup_monitor
    backup_monitor.A('list_files_orator.state', newstate)
def _on_outbox_packet(self, outpacket, wide, callbacks, target=None, route=None):
    """
    Intercept an outgoing packet and wrap it into an encrypted Relay()
    packet addressed to the proxy router.

    Returns the created ``packet_out`` instance on success, ``None`` when
    the packet is skipped (service off, or packet addressed to the router
    itself), or the result of queueing it as a pending packet when the
    router connection is not ready yet.
    """
    if not driver.is_started('service_proxy_transport'):
        if _Debug:
            lg.out(_DebugLevel, 'proxy_sender._on_outbox_packet skip because service_proxy_transport is not started')
        return None
    # router link not in LISTEN state yet: queue the packet for later
    if proxy_receiver.A() and proxy_receiver.A().state != 'LISTEN':
        return self._add_pending_packet(outpacket, wide, callbacks)
    router_idurl = proxy_receiver.GetRouterIDURL()
    router_identity_obj = proxy_receiver.GetRouterIdentity()
    router_proto_host = proxy_receiver.GetRouterProtoHost()
    router_proto, router_host = router_proto_host
    publickey = router_identity_obj.publickey
    my_original_identity_src = proxy_receiver.ReadMyOriginalIdentitySource()
    # any missing piece of router info means the tunnel is not usable yet
    if not router_idurl or not router_identity_obj or not router_proto_host or not my_original_identity_src:
        return self._add_pending_packet(outpacket, wide, callbacks)
    if outpacket.RemoteID == router_idurl:
        # packets TO the router itself must bypass the relay wrapping
        if _Debug:
            lg.out(_DebugLevel, 'proxy_sender._on_outbox_packet skip, packet addressed to router and must be sent in a usual way')
        return None
    # payload layout: my IDURL, destination IDURL, wide-flag line, then
    # the serialized original packet — all newline separated
    src = ''
    src += my_id.getLocalID() + '\n'
    src += outpacket.RemoteID + '\n'
    src += 'wide\n' if wide else '\n'
    src += outpacket.Serialize()
    # encrypt the payload with a fresh session key, the session key itself
    # is protected with the router's public key
    block = encrypted.Block(
        my_id.getLocalID(),
        'routed outgoing data',
        0,
        key.NewSessionKey(),
        key.SessionKeyType(),
        True,
        src,
        EncryptFunc=lambda inp: key.EncryptStringPK(publickey, inp))
    block_encrypted = block.Serialize()
    # wrap the encrypted block into a signed Relay() packet for the router;
    # keeps the original OwnerID and PacketID so replies can be correlated
    newpacket = signed.Packet(
        commands.Relay(),
        outpacket.OwnerID,
        my_id.getLocalID(),
        outpacket.PacketID,
        block_encrypted,
        router_idurl)
    result_packet = packet_out.create(
        outpacket,
        wide=wide,
        callbacks=callbacks,
        route={
            'packet': newpacket,
            'proto': router_proto,
            'host': router_host,
            'remoteid': router_idurl,
            'description': 'Relay_%s[%s]_%s' % (outpacket.Command, outpacket.PacketID, nameurl.GetName(router_idurl)),
        })
    self.event('outbox-packet-sent', (outpacket, newpacket, result_packet))
    if _Debug:
        lg.out(_DebugLevel, '>>>Relay-OUT %s' % str(outpacket))
        lg.out(_DebugLevel, '        sent to %s://%s with %d bytes' % (
            router_proto, router_host, len(block_encrypted)))
    # drop local references to (potentially large) intermediate objects
    del src
    del block
    del newpacket
    del outpacket
    del router_identity_obj
    del router_idurl
    del router_proto_host
    return result_packet
def doNotifySuppliersChanged(self, arg):
    """
    Action method: signal the backup monitor that the suppliers list
    was modified, so it can re-check the stored backups.  Does nothing
    while the backups service is stopped.
    """
    if not driver.is_started('service_backups'):
        return
    from storage import backup_monitor
    backup_monitor.A('suppliers-changed')
def Data(request):
    """
    This is when we 1) save my requested data to restore the backup 2) or
    save the customer file on our local HDD.

    Case 1: the packet is our own data coming back (e.g. the backup index),
    handed to ``backup_control``.  Case 2: we act as a supplier and store
    the customer's data on disk, after checking identity and donated space.
    """
    # 1. this is our Data!
    if request.OwnerID == my_id.getLocalID():
        if _Debug:
            lg.out(_DebugLevel, "p2p_service.Data %r for us from %s" % (request, nameurl.GetName(request.RemoteID)))
        if driver.is_started("service_backups"):
            # currently only the backup index file is consumed here
            if request.PacketID in [settings.BackupIndexFileName()]:
                from storage import backup_control
                backup_control.IncomingSupplierBackupIndex(request)
                return True
        return False
    # 2. this Data is not belong to us
    if not driver.is_started("service_supplier"):
        return SendFail(request, "supplier service is off")
    if not contactsdb.is_customer(request.OwnerID):  # SECURITY
        lg.warn("%s not a customer, packetID=%s" % (request.OwnerID, request.PacketID))
        SendFail(request, "not a customer")
        return
    filename = makeFilename(request.OwnerID, request.PacketID)
    if filename == "":
        lg.warn("got empty filename, bad customer or wrong packetID? ")
        SendFail(request, "empty filename")
        return
    dirname = os.path.dirname(filename)
    if not os.path.exists(dirname):
        try:
            bpio._dirs_make(dirname)
        except:  # NOTE(review): bare except hides the real OS error — consider logging it
            lg.warn("ERROR can not create sub dir " + dirname)
            SendFail(request, "write error")
            return
    data = request.Serialize()
    donated_bytes = settings.getDonatedBytes()
    # lazily create the space accounting file on first use
    if not os.path.isfile(settings.CustomersSpaceFile()):
        bpio._write_dict(settings.CustomersSpaceFile(), {"free": donated_bytes})
        if _Debug:
            lg.out(_DebugLevel, "p2p_service.Data created a new space file")
    space_dict = bpio._read_dict(settings.CustomersSpaceFile())
    if request.OwnerID not in space_dict.keys():
        lg.warn("no info about donated space for %s" % request.OwnerID)
        SendFail(request, "no info about donated space")
        return
    used_space_dict = bpio._read_dict(settings.CustomersUsedSpaceFile(), {})
    # NOTE(review): the free-space check only runs when the customer already
    # has a used-space record; a first-time write is accepted unchecked — confirm intended
    if request.OwnerID in used_space_dict.keys():
        try:
            bytes_used_by_customer = int(used_space_dict[request.OwnerID])
            bytes_donated_to_customer = int(space_dict[request.OwnerID])
            if bytes_donated_to_customer - bytes_used_by_customer < len(data):
                lg.warn("no free space for %s" % request.OwnerID)
                SendFail(request, "no free space")
                return
        except:  # malformed accounting entry: log and fall through to the write
            lg.exc()
    if not bpio.WriteFile(filename, data):
        lg.warn("ERROR can not write to " + str(filename))
        SendFail(request, "write error")
        return
    # acknowledge with the payload size, then schedule the local space tester
    SendAck(request, str(len(request.Payload)))
    from supplier import local_tester
    reactor.callLater(0, local_tester.TestSpaceTime)
    del data
    if _Debug:
        lg.out(
            _DebugLevel,
            "p2p_service.Data saved from [%s/%s] to %s"
            % (nameurl.GetName(request.OwnerID), nameurl.GetName(request.CreatorID), filename),
        )