def doReportSuccess(self, arg):
    """
    Action method.

    Aggregate collected STUN responses: if every server reported the same
    external UDP port the NAT is treated as 'non-symmetric', otherwise
    'symmetric'. Stores detected address in metadata files and notifies
    all registered callbacks with the final result tuple.
    """
    try:
        ports = [addr[1] for addr in self.stun_results.values()]
        min_port = min(ports)
        max_port = max(ports)
        # list(...) is required on Python 3 where dict.values() is a view
        # and does not support indexing
        my_ip = list(self.stun_results.values())[0][0]
        if min_port == max_port:
            result = ('stun-success', 'non-symmetric', my_ip, min_port)
        else:
            result = ('stun-success', 'symmetric', my_ip, self.stun_results)
        self.my_address = (my_ip, min_port)
    except Exception:
        lg.exc()
        result = ('stun-failed', None, None, [])
        self.my_address = None
    if self.my_address:
        # remember detected external address for other components
        bpio.WriteFile(settings.ExternalIPFilename(), self.my_address[0])
        bpio.WriteFile(settings.ExternalUDPPortFilename(), str(self.my_address[1]))
        if _Debug:
            lg.out(_DebugLevel, 'stun_client.doReportSuccess based on %d nodes: %s' % (
                len(self.stun_results), str(self.my_address)))
    if _Debug:
        lg.out(_DebugLevel + 10, ' %s' % str(result))
    for cb in self.callbacks:
        cb(result[0], result[1], result[2], result[3])
    self.callbacks = []
def _create_new_identity(self):
    """
    Build a fresh identity file from a newly generated private key.

    The user name and external IP are taken from the local config files,
    the result is written next to the real identity file with a '.new'
    suffix.
    """
    username = bpio.ReadTextFile(settings.UserNameFilename())
    external_ip = misc.readExternalIP()  # bpio.ReadTextFile(settings.ExternalIPFilename())
    lg.out(4, 'id_registrator._create_new_identity %s %s ' % (username, external_ip))
    key.InitMyKey()
    lg.out(4, ' my key is ready')
    new_ident = my_id.buildDefaultIdentity(
        name=username, ip=external_ip, idurls=self.free_idurls)
    # localIP = bpio.ReadTextFile(settings.LocalIPFilename())
    xmlsrc = new_ident.serialize()
    destination = settings.LocalIdentityFilename() + '.new'
    bpio.WriteFile(destination, xmlsrc)
    self.new_identity = new_ident
    lg.out(4, ' wrote %d bytes to %s' % (len(xmlsrc), destination))
def register_key(key_id, openssh_input_string, keys_folder=None, output_type='openssh'):
    """
    Deserialize an openssh-formatted key string, remember it under
    ``key_id`` and persist it on disk in ``keys_folder``.

    Returns the key object, or None when the key already exists or the
    input string can not be parsed.
    """
    if key_id in known_keys():
        lg.warn('key %s already exists' % key_id)
        return None
    key_object = unserialize_key_to_object(openssh_input_string)
    if not key_object:
        lg.warn('invalid openssh string, unserialize_key_to_object() failed')
        return None
    lg.out(4, 'my_keys.register_key %s from %d bytes openssh_input_string' % (
        key_id, len(openssh_input_string)))
    known_keys()[key_id] = key_object
    folder = keys_folder or settings.PrivateKeysDir()
    key_filepath = os.path.join(folder, key_id)
    bpio.WriteFile(key_filepath, key_object.toString(output_type))
    if _Debug:
        lg.out(_DebugLevel, ' key %s saved to %s' % (key_id, key_filepath))
    return key_object
def doSaveMyName(self, arg):
    """
    Action method.

    Extract the desired user name (and optional comma-separated list of
    preferred identity servers) from ``arg``, resolve min/max identity
    sources from configuration and store the user name on disk.
    """
    # BUGFIX: `config` was imported inside a conditional try-block below,
    # but is used unconditionally for min/max servers - import it up front
    from main import config
    try:
        login = arg['username']
    except:
        login = arg[0]
        if len(arg) > 1:
            # wrap in list() so .append() below works on Python 3,
            # where map() returns an iterator
            self.preferred_servers = list(map(lambda s: s.strip(), arg[1].split(',')))
    if not self.known_servers:
        self.known_servers = known_servers.by_host()
    if not self.preferred_servers:
        try:
            for srv in str(config.conf().getData('services/identity-propagate/preferred-servers')).split(','):
                if srv.strip():
                    self.preferred_servers.append(srv.strip())
        except:
            pass
    self.min_servers = max(
        settings.MinimumIdentitySources(),
        config.conf().getInt('services/identity-propagate/min-servers') or settings.MinimumIdentitySources())
    self.max_servers = min(
        settings.MaximumIdentitySources(),
        config.conf().getInt('services/identity-propagate/max-servers') or settings.MaximumIdentitySources())
    lg.out(4, 'id_registrator.doSaveMyName [%s]' % login)
    lg.out(4, ' known_servers=%s' % self.known_servers)
    lg.out(4, ' preferred_servers=%s' % self.preferred_servers)
    lg.out(4, ' min_servers=%s' % self.min_servers)
    lg.out(4, ' max_servers=%s' % self.max_servers)
    bpio.WriteFile(settings.UserNameFilename(), login)
def _try(site, result, counter):
    """
    Attempt to bind the WSGI site on the current ``_WSGIPort``.

    When the port is busy the attempt is re-scheduled half a second later;
    after ten failures a random port from 8001-8999 is tried instead.
    Fires ``result`` with the bound port number on success.
    """
    global _WSGIListener
    global _WSGIPort
    if counter > 10:
        # too many failures on the configured port - pick a random one
        _WSGIPort = random.randint(8001, 8999)
    if _Debug:
        lg.out(_DebugLevel, ' _try port=%d counter=%d' % (_WSGIPort, counter))
    listener = None
    try:
        listener = reactor.listenTCP(_WSGIPort, site)
    except:
        if _Debug:
            lg.out(_DebugLevel, ' _try it seems port %d is busy' % _WSGIPort)
    _WSGIListener = listener
    if _WSGIListener is None:
        reactor.callLater(0.5, _try, site, result, counter + 1)
        return
    bpio.WriteFile(settings.LocalWSGIPortFilename(), str(_WSGIPort))
    if _Debug:
        lg.out(_DebugLevel, ' _try STARTED on port %d' % _WSGIPort)
    result.callback(_WSGIPort)
def doSavePacket(self, NewPacket):
    """
    Action method.

    Store one received packet (a Data or Parity piece of the backup) on
    local HDD and mark that piece as present in the OnHand tables.
    """
    glob_path = global_id.ParseGlobalID(NewPacket.PacketID, detect_version=True)
    packetID = global_id.CanonicalID(NewPacket.PacketID)
    customer_id, _, _, _, SupplierNumber, dataORparity = packetid.SplitFull(packetID)
    if dataORparity == 'Data':
        self.OnHandData[SupplierNumber] = True
    elif dataORparity == 'Parity':
        # BUGFIX: compare the already-extracted dataORparity value, to be
        # consistent with the 'Data' branch above (previously this called
        # NewPacket.DataOrParity() - see the identical logic in the
        # restore_worker version of doSavePacket)
        self.OnHandParity[SupplierNumber] = True
    filename = os.path.join(settings.getLocalBackupsDir(), customer_id, glob_path['path'])
    dirpath = os.path.dirname(filename)
    if not os.path.exists(dirpath):
        try:
            bpio._dirs_make(dirpath)
        except:
            lg.exc()
    # either way the payload of packet is saved
    if not bpio.WriteFile(filename, NewPacket.Payload):
        lg.warn("unable to write to %s" % filename)
        return
    if self.packetInCallback is not None:
        self.packetInCallback(self.BackupID, NewPacket)
    lg.out(6, "restore.doSavePacket %s saved to %s" % (packetID, filename))
def SendServers():
    """
    My identity file can be stored in different locations, see the "sources"
    field. So I can use different identity servers to store more secure. This
    method will send my identity file to all my identity servers via
    transport_tcp.
    """
    from transport.tcp import tcp_node
    fd, sendfilename = tmpfile.make("propagate")
    os.close(fd)
    my_ident = my_id.getLocalIdentity()
    bpio.WriteFile(sendfilename, my_ident.serialize())
    deferreds = []
    # every source url of my identity points at the server that must hold it
    for idurl in my_ident.sources:
        protocol, host, port, filename = nameurl.UrlParse(idurl)
        # if host == settings.IdentityServerName():
        #     host = '67.207.147.183'
        defaults = (settings.IdentityWebPort(), settings.IdentityServerPort())
        webport, tcpport = known_servers.by_host().get(host, defaults)
        # srvhost = '%s:%d' % (host, int(tcpport))
        deferreds.append(tcp_node.send(sendfilename, (host, int(tcpport)), 'Identity', True))
        # deferreds.append(gateway.send_file_single('tcp', srvhost, sendfilename, 'Identity'))
    return DeferredList(deferreds, consumeErrors=True)
def register_key(key_id, key_object_or_string, keys_folder=None):
    """
    Add a key to the set of known keys and store it in the key store.

    Accepts either an already unserialized key object or an openssh string.
    Returns the file path where the key was written, or None on failure.
    """
    if key_id in known_keys():
        lg.warn('key %s already exists' % key_id)
        return None
    if isinstance(key_object_or_string, str):
        lg.out(4, 'my_keys.register_key %s from %d bytes openssh_input_string' % (
            key_id, len(key_object_or_string)))
        key_object = unserialize_key_to_object(key_object_or_string)
        if not key_object:
            lg.warn('invalid openssh string, unserialize_key_to_object() failed')
            return None
    else:
        lg.out(4, 'my_keys.register_key %s from object' % key_id)
        key_object = key_object_or_string
    known_keys()[key_id] = key_object
    folder = keys_folder or settings.KeyStoreDir()
    # public and private keys are distinguished by the file name suffix
    suffix = '.public' if key_object.isPublic() else '.private'
    key_filepath = os.path.join(folder, key_id + suffix)
    bpio.WriteFile(key_filepath, key_object.toString())
    if _Debug:
        lg.out(_DebugLevel, ' key %s added, saved to %s' % (key_id, key_filepath))
    return key_filepath
def backup_outgoing_message(private_message_object, message_id):
    """
    Store an outgoing private message locally and start uploading it into
    the distributed storage catalog.

    Returns True when the local copy was written and the upload started.
    """
    if not driver.is_on('service_backups'):
        lg.warn('service_backups is not started')
        return False
    recipient = private_message_object.recipient
    serialized_message = private_message_object.serialize()
    local_msg_folder = os.path.join(settings.ChatChannelsDir(), recipient, 'out')
    if not bpio._dir_exist(local_msg_folder):
        bpio._dirs_make(local_msg_folder)
    local_msg_filename = os.path.join(local_msg_folder, message_id)
    if not bpio.WriteFile(local_msg_filename, serialized_message):
        lg.warn('failed writing outgoing message locally')
        return False
    remote_path_for_message = os.path.join('.messages', 'out', recipient, message_id)
    global_message_path = global_id.MakeGlobalID(
        customer=messages_key_id(), path=remote_path_for_message)
    # register the remote path in the catalog first, then start the upload
    response = api.file_create(global_message_path)
    if response['status'] != 'OK':
        lg.warn('failed to create path "%s" in the catalog: %s' % (
            global_message_path, response['errors']))
        return False
    response = api.file_upload_start(local_msg_filename, global_message_path, wait_result=False)
    if response['status'] != 'OK':
        lg.warn('failed to upload message "%s": %s' % (global_message_path, response['errors']))
        return False
    return True
def generate_key(key_id, key_size=4096, keys_folder=None, output_type='openssh'):
    """
    Create a brand new RSA key of ``key_size`` bits, remember it under
    ``key_id`` and write it into the key store folder.

    Returns the new key object, or None when ``key_id`` is already taken.
    """
    if key_id in known_keys():
        lg.warn('key "%s" already exists' % key_id)
        return None
    lg.out(4, 'my_keys.generate_key "%s" of %d bits' % (key_id, key_size))
    new_key = keys.Key(RSA.generate(key_size, os.urandom))
    known_keys()[key_id] = new_key
    destination_folder = keys_folder or settings.KeyStoreDir()
    # file name suffix reflects whether this is a public or a private key
    suffix = '.public' if new_key.isPublic() else '.private'
    key_filepath = os.path.join(destination_folder, key_id + suffix)
    bpio.WriteFile(key_filepath, new_key.toString(output_type))
    if _Debug:
        lg.out(_DebugLevel, ' key %s generated, saved to %s' % (key_id, key_filepath))
    return new_key
def save(result):
    """
    Callback fired with the STUN outcome: remember the detected external
    IP on disk and drive the state machine accordingly.
    """
    lg.out(4, ' external IP : %s' % result)
    if result['result'] != 'stun-success':
        self.automat('stun-failed')
        return
    external_ip = result['ip']
    bpio.WriteFile(settings.ExternalIPFilename(), external_ip)
    self.automat('stun-success', external_ip)
def doDetectLocalIP(self, arg):
    """
    Action method.

    Detect the machine's local network address, persist it in the metadata
    folder and notify the state machine.
    """
    detected_ip = net_misc.getLocalIp()
    bpio.WriteFile(settings.LocalIPFilename(), detected_ip)
    lg.out(4, 'id_registrator.doDetectLocalIP [%s]' % detected_ip)
    self.automat('local-ip-detected')
def init():
    """
    Got a filename for today, check if already exists, read today file, start
    counting.
    """
    global CountTimeIn
    global CountTimeOut
    lg.out(4, 'bandwidth.init')
    # make sure both counter files exist before reading them
    for counter_file in (filenameIN(), filenameOUT()):
        if not os.path.isfile(counter_file):
            bpio.WriteFile(counter_file, '')
    read_bandwidthIN()
    read_bandwidthOUT()
    CountTimeIn = time.time()
    CountTimeOut = time.time()
    # persist counters when the reactor goes down
    reactor.addSystemEventTrigger('before', 'shutdown', save)
def setTransportOrder(orderL):
    """
    Validate transports and save the list in the [BitDust data dir]\\metadata\\torder.

    It is useful to remember the priority of used transports.
    """
    orderL = validateTransports(orderL)
    # BUGFIX: previously the raw, UNvalidated list was serialized here,
    # defeating the validateTransports() call above; also replaced the
    # Python-2-only string.join() with str.join()
    orderTxt = ' '.join(orderL)
    lg.out(8, 'my_id.setTransportOrder: ' + str(orderTxt))
    bpio.WriteFile(settings.DefaultTransportOrderFilename(), orderTxt)
def update(url, xml_src):
    """
    This is a correct method to update an identity in the local cache.

    PREPRO need to check that date or version is after old one so not
    vulnerable to replay attacks.

    Returns True when the cache (memory and disk) was refreshed.
    """
    try:
        newid = identity.identity(xmlsrc=xml_src)
    except:
        lg.exc()
        return False
    if not newid.isCorrect():
        lg.out(1, "identitydb.update ERROR: incorrect identity " + str(url))
        return False
    try:
        if not newid.Valid():
            lg.out(1, "identitydb.update ERROR identity not Valid" + str(url))
            return False
    except:
        lg.exc()
        return False
    filename = os.path.join(settings.IdentityCacheDir(), nameurl.UrlFilename(url))
    if os.path.exists(filename):
        # compare against the previously cached copy of this identity
        oldidentityxml = bpio.ReadTextFile(filename)
        oldidentity = identity.identity(xmlsrc=oldidentityxml)
        # SECURITY: the public key bound to a url must never change
        if oldidentity.publickey != newid.publickey:
            lg.out(1, "identitydb.update ERROR new publickey does not match old : SECURITY VIOLATION " + url)
            return False
        if oldidentity.signature != newid.signature:
            lg.out(6, 'identitydb.update have new data for ' + nameurl.GetName(url))
        else:
            # same signature: nothing changed, only refresh the memory cache
            idset(url, newid)
            return True
    bpio.WriteFile(filename, xml_src)  # publickeys match so we can update it
    idset(url, newid)
    return True
def GenerateNewKey(keyfilename=None):
    """
    Generate a fresh RSA private key and store it on disk in openssh format.

    An optional '<keyfilename>_location' file may redirect the key storage
    to a different path.
    """
    global _MyKeyObject
    global _MyRsaKey
    if keyfilename is None:
        keyfilename = settings.KeyFileName()
    location_pointer = keyfilename + '_location'
    if os.path.exists(location_pointer):
        redirected = bpio.ReadTextFile(location_pointer).strip()
        if os.path.exists(redirected):
            keyfilename = redirected
    lg.out(4, 'key.InitMyKey generate new private key')
    _MyRsaKey = RSA.generate(settings.getPrivateKeySize(), os.urandom)
    _MyKeyObject = keys.Key(_MyRsaKey)
    keystring = _MyKeyObject.toString('openssh')
    bpio.WriteFile(keyfilename, keystring)
    lg.out(4, ' wrote %d bytes to %s' % (len(keystring), keyfilename))
def step0():
    """
    First step of the Windows update sequence: make sure the repository
    metadata file exists and start downloading the latest version info.
    """
    global _UpdatingInProgress
    lg.out(4, 'os_windows_update.step0')
    if _UpdatingInProgress:
        lg.out(6, 'os_windows_update.step0 _UpdatingInProgress is True, skip.')
        return
    repo, locationURL = misc.ReadRepoLocation()
    src = bpio.ReadTextFile(settings.RepoFile())
    if src == '':
        # repo file is empty or missing - (re)create it
        bpio.WriteFile(settings.RepoFile(), '%s\n%s' % (repo, locationURL))
    _UpdatingInProgress = True
    d = download_version()
    d.addCallback(step1)
    d.addErrback(fail)
def saveLocalIdentity():
    """
    Save identity object from memory into local file.

    The identity is signed, then serialized to XML before writing.
    """
    global _LocalIdentity
    if not isLocalIdentityReady():
        lg.warn("ERROR local identity not exist!")
        return
    if not _LocalIdentity.isCorrect():
        lg.warn('local identity is not correct')
        return
    _LocalIdentity.sign()
    xml_src = _LocalIdentity.serialize()
    destination = bpio.portablePath(settings.LocalIdentityFilename())
    bpio.WriteFile(destination, xml_src)
    lg.out(6, "my_id.saveLocalIdentity %d bytes wrote to %s" % (len(xml_src), destination))
def save_keys_local(keys_folder=None, output_type='openssh'):
    """
    Write every known key into ``keys_folder`` (defaults to the private
    keys directory), one file per key id.
    """
    if not keys_folder:
        keys_folder = settings.PrivateKeysDir()
    if _Debug:
        lg.out(_DebugLevel, 'my_keys.save_keys_local will store all known keys in %s' % keys_folder)
    count = 0
    for key_id, key_object in known_keys().items():
        destination = os.path.join(keys_folder, key_id)
        bpio.WriteFile(destination, key_object.toString(output_type))
        count += 1
    if _Debug:
        lg.out(_DebugLevel, ' %d keys saved' % count)
def _file_received(self, newpacket, state):
    """
    Handle one Data packet arriving during rebuild: validate it, write the
    payload into the local backups folder and report it to backup_matrix.
    """
    # intermediate / terminal delivery states that need no action here
    if state in ['in queue', 'shutdown', 'exist', 'failed']:
        return
    if state != 'received':
        lg.warn("incorrect state [%s] for packet %s" % (str(state), str(newpacket)))
        return
    if not newpacket.Valid():
        # TODO: if we didn't get a valid packet ... re-request it or delete
        # it?
        lg.warn("%s is not a valid packet: %r" % (newpacket.PacketID, newpacket))
        return
    # packetID = newpacket.PacketID
    packetID = global_id.CanonicalID(newpacket.PacketID)
    customer, remotePath = packetid.SplitPacketID(packetID)
    filename = os.path.join(settings.getLocalBackupsDir(), customer, remotePath)
    if os.path.isfile(filename):
        # the piece already exists locally - just report it as available
        lg.warn("found existed file" + filename)
        self.automat('inbox-data-packet', packetID)
        return
        # try:
        #     os.remove(filename)
        # except:
        #     lg.exc()
    dirname = os.path.dirname(filename)
    if not os.path.exists(dirname):
        try:
            bpio._dirs_make(dirname)
        except:
            lg.out(2, "backup_rebuilder._file_received ERROR can not create sub dir " + dirname)
            return
    if not bpio.WriteFile(filename, newpacket.Payload):
        lg.out(2, "backup_rebuilder._file_received ERROR writing " + filename)
        return
    from storage import backup_matrix
    backup_matrix.LocalFileReport(packetID)
    lg.out(10, "backup_rebuilder._file_received and wrote to " + filename)
    self.automat('inbox-data-packet', packetID)
def GenerateNewKey(keyfilename=None):
    """
    Create a brand new RSA private key and write it into the key file.

    Honors an optional '<keyfilename>_location' redirect file pointing to
    an alternative storage path.
    """
    global _MyKeyObject
    if keyfilename is None:
        keyfilename = settings.KeyFileName()
    location_pointer = keyfilename + '_location'
    if os.path.exists(location_pointer):
        redirected = bpio.ReadTextFile(location_pointer).strip()
        if os.path.exists(redirected):
            keyfilename = redirected
    if _Debug:
        lg.out(_DebugLevel, 'key.InitMyKey generate new private key')
    _MyKeyObject = rsa_key.RSAKey()
    _MyKeyObject.generate(settings.getPrivateKeySize())
    keystring = _MyKeyObject.toString()
    bpio.WriteFile(keyfilename, keystring)
    if _Debug:
        lg.out(_DebugLevel, ' wrote %d bytes to %s' % (len(keystring), keyfilename))
    # drop the plain-text key material from memory as soon as possible
    del keystring
    gc.collect()
def state_changed(self, oldstate, newstate, event, arg):
    """
    This method intended to catch the moment when automat's state were
    changed.

    Persists the new state into the supplier's service file so it survives
    restarts.
    """
    if newstate not in ['CONNECTED', 'DISCONNECTED', 'NO_SERVICE']:
        return
    supplierPath = settings.SupplierPath(
        self.supplier_idurl, customer_idurl=self.customer_idurl)
    if not os.path.isdir(supplierPath):
        try:
            os.makedirs(supplierPath)
        except:
            lg.exc()
            return
    bpio.WriteFile(
        settings.SupplierServiceFilename(
            self.supplier_idurl, customer_idurl=self.customer_idurl),
        newstate,
    )
def save_keys_local(keys_folder=None):
    """
    Dump every known key into ``keys_folder`` (defaults to the key store
    directory), using '.public'/'.private' file suffixes.
    """
    if not keys_folder:
        keys_folder = settings.KeyStoreDir()
    if _Debug:
        lg.out(_DebugLevel, 'my_keys.save_keys_local will store all known keys in %s' % keys_folder)
    count = 0
    for key_id, key_object in known_keys().items():
        suffix = '.public' if key_object.isPublic() else '.private'
        destination = os.path.join(keys_folder, key_id + suffix)
        bpio.WriteFile(destination, key_object.toString())
        count += 1
    if _Debug:
        lg.out(_DebugLevel, ' %d keys saved' % count)
def doSavePacket(self, arg):
    """
    Action method.

    Store one received restore piece (Data or Parity) on local HDD and
    mark it as on-hand for the corresponding supplier number.
    """
    if not arg:
        return
    NewPacket, PacketID = arg
    glob_path = global_id.ParseGlobalID(PacketID, detect_version=True)
    packetID = global_id.CanonicalID(PacketID)
    customer_id, _, _, _, SupplierNumber, dataORparity = packetid.SplitFull(packetID)
    if dataORparity == 'Data':
        self.OnHandData[SupplierNumber] = True
    elif dataORparity == 'Parity':
        self.OnHandParity[SupplierNumber] = True
    # NewPacket may be None when the piece already exists locally
    if NewPacket:
        filename = os.path.join(settings.getLocalBackupsDir(), customer_id, glob_path['path'])
        dirpath = os.path.dirname(filename)
        if not os.path.exists(dirpath):
            try:
                bpio._dirs_make(dirpath)
            except:
                lg.exc()
        # either way the payload of packet is saved
        if not bpio.WriteFile(filename, NewPacket.Payload):
            lg.warn("unable to write to %s" % filename)
            return
        if self.packetInCallback is not None:
            self.packetInCallback(self.backup_id, NewPacket)
        if _Debug:
            lg.out(_DebugLevel, "restore_worker.doSavePacket %s saved to %s" % (packetID, filename))
    else:
        lg.warn('new packet is None, probably already exists locally')
def init(UI='', options=None, args=None, overDict=None, executablePath=None):
    """
    In the method ``main()`` program firstly checks the command line arguments
    and then calls this method to start the whole process.

    This initialize some low level modules and finally create an
    instance of ``initializer()`` state machine and send it an event
    "run".
    """
    global AppDataDir
    from logs import lg
    lg.out(4, 'bpmain.run UI="%s"' % UI)
    from system import bpio
    #---settings---
    from main import settings
    if overDict:
        settings.override_dict(overDict)
    settings.init(AppDataDir)
    if not options or options.debug is None:
        lg.set_debug_level(settings.getDebugLevel())
    from main import config
    # keep log verbosity in sync with the config option at runtime
    config.conf().addCallback('logs/debug-level',
                              lambda p, value, o, r: lg.set_debug_level(value))
    #---USE_TRAY_ICON---
    # tray icon only makes sense when the node is already set up and a GUI exists
    if os.path.isfile(settings.LocalIdentityFilename()) and os.path.isfile(settings.KeyFileName()):
        try:
            from system.tray_icon import USE_TRAY_ICON
            if bpio.Mac() or not bpio.isGUIpossible():
                lg.out(4, ' GUI is not possible')
                USE_TRAY_ICON = False
            if USE_TRAY_ICON:
                # wxreactor must be installed BEFORE twisted.internet.reactor is imported
                from twisted.internet import wxreactor
                wxreactor.install()
                lg.out(4, ' wxreactor installed')
        except:
            USE_TRAY_ICON = False
            lg.exc()
    else:
        lg.out(4, ' local identity or key file is not ready')
        USE_TRAY_ICON = False
    lg.out(4, ' USE_TRAY_ICON=' + str(USE_TRAY_ICON))
    if USE_TRAY_ICON:
        from system import tray_icon
        icons_path = bpio.portablePath(
            os.path.join(bpio.getExecutableDir(), 'icons'))
        lg.out(4, 'bpmain.run call tray_icon.init(%s)' % icons_path)
        tray_icon.init(icons_path)

        def _tray_control_func(cmd):
            # tray menu callback: only 'exit' is handled here
            if cmd == 'exit':
                import shutdowner
                shutdowner.A('stop', 'exit')
        tray_icon.SetControlFunc(_tray_control_func)
    #---OS Windows init---
    if bpio.Windows():
        try:
            # named mutex guards against a second BitDust instance
            from win32event import CreateMutex
            mutex = CreateMutex(None, False, "BitDust")
            lg.out(4, 'bpmain.run created a Mutex: %s' % str(mutex))
        except:
            lg.exc()
    #---twisted reactor---
    lg.out(4, 'bpmain.run want to import twisted.internet.reactor')
    try:
        from twisted.internet import reactor
    except:
        lg.exc()
        sys.exit('Error initializing reactor in bpmain.py\n')
    #---logfile----
    if lg.logs_enabled() and lg.log_file():
        lg.out(2, 'bpmain.run want to switch log files')
        if bpio.Windows() and bpio.isFrozen():
            lg.stdout_stop_redirecting()
        lg.close_log_file()
        lg.open_log_file(settings.MainLogFilename())
        # lg.open_log_file(settings.MainLogFilename() + '-' + time.strftime('%y%m%d%H%M%S') + '.log')
        if bpio.Windows() and bpio.isFrozen():
            lg.stdout_start_redirecting()
    #---memdebug---
    # if settings.uconfig('logs.memdebug-enable') == 'True':
    #     try:
    #         from logs import memdebug
    #         memdebug_port = int(settings.uconfig('logs.memdebug-port'))
    #         memdebug.start(memdebug_port)
    #         reactor.addSystemEventTrigger('before', 'shutdown', memdebug.stop)
    #         lg.out(2, 'bpmain.run memdebug web server started on port %d' % memdebug_port)
    #     except:
    #         lg.exc()
    #---process ID---
    try:
        pid = os.getpid()
        pid_file_path = os.path.join(settings.MetaDataDir(), 'processid')
        bpio.WriteFile(pid_file_path, str(pid))
        lg.out(2, 'bpmain.run wrote process id [%s] in the file %s' % (str(pid), pid_file_path))
    except:
        lg.exc()
    # #---reactor.callLater patch---
    # if lg.is_debug(12):
    #     patchReactorCallLater(reactor)
    #     monitorDelayedCalls(reactor)
    # #---plugins---
    # from plugins import plug
    # plug.init()
    # reactor.addSystemEventTrigger('before', 'shutdown', plug.shutdown)
    lg.out(2, " python sys.path is:\n %s" % ('\n '.join(sys.path)))
    lg.out(2, "bpmain.run UI=[%s]" % UI)
    if lg.is_debug(20):
        lg.out(0, '\n' + bpio.osinfofull())
    lg.out(4, 'import automats')
    #---START!---
    from automats import automat
    automat.LifeBegins(lg.when_life_begins())
    automat.OpenLogFile(settings.AutomatsLog())
    from main import events
    events.init()
    from main import initializer
    IA = initializer.A()
    lg.out(4, 'sending event "run" to initializer()')
    reactor.callWhenRunning(IA.automat, 'run', UI)
    return IA
def _save_identity(self, inputfilename):
    """
    Validate an identity file received by this identity server and, if it
    passes all checks, store it in the server's identity folder.
    """
    lg.out(6, "id_server._save_identity " + inputfilename)
    # reject obviously oversized input before parsing anything
    if os.path.getsize(inputfilename) > 50000:
        lg.warn("input file too big - ignoring ")
        tmpfile.erase('idsrv', inputfilename, 'input file too big')
        # os.remove(inputfilename)
        return
    newxml = bpio.ReadTextFile(inputfilename)
    if len(newxml.strip()) < 500:
        lg.warn("input file too small - ignoring ")
        tmpfile.erase('idsrv', inputfilename, 'input file too small')
        # os.remove(inputfilename)
        return
    try:
        newidentity = identity.identity(xmlsrc=newxml)
    except:
        lg.warn("input file is wrong - ignoring ")
        tmpfile.erase('idsrv', inputfilename, 'input file is wrong')
        # os.remove(inputfilename)
        return
    tmpfile.erase('idsrv', inputfilename, 'id received')
    if not newidentity.isCorrect():
        lg.warn("has non-Correct identity")
        return
    if not newidentity.Valid():
        lg.warn("has non-Valid identity")
        return
    # one of the identity sources must point at this very server
    matchid = ""
    for idurl in newidentity.sources:
        protocol, host, port, filename = nameurl.UrlParse(idurl)
        if host == self.hostname:
            lg.out(4, "id_server._save_identity found match for us")
            matchid = idurl
            break
    if matchid == "":
        lg.warn("identity is not for this nameserver")
        return
    protocol, host, port, filename = nameurl.UrlParse(matchid)
    name, justxml = filename.split(".")
    # SECURITY check that name is simple
    if justxml != "xml":
        lg.warn("identity name " + filename)
        return
    if len(name) > settings.MaximumUsernameLength():
        lg.warn("identity name " + filename)
        return
    if len(name) < settings.MinimumUsernameLength():
        lg.warn("identity name " + filename)
        return
    for c in name:
        if c not in settings.LegalUsernameChars():
            lg.warn("identity name " + filename)
            return
    localfilename = os.path.join(settings.IdentityServerDir(), filename)
    # lg.out(8,"id_server.SaveIdentity with filename " + localfilename)
    oldxml = ''
    # need to make sure id was not already used by different key - which would mean someone trying to steal identity
    if os.path.exists(localfilename):
        lg.out(6, "id_server._save_identity was already an identity with this name " + localfilename)
        oldxml = bpio.ReadTextFile(localfilename)
        oldidentity = identity.identity(xmlsrc=oldxml)
        if oldidentity.publickey != newidentity.publickey:
            lg.warn("new public key does not match old " + localfilename)
            return
    if newxml != oldxml:
        if not os.path.exists(localfilename):
            lg.out(6, "id_server._save_identity will save NEW Identity: " + filename)
        bpio.WriteFile(localfilename, newxml)
def doVerifyAndRestore(self, arg):
    """
    Action method.

    Verify the identity source downloaded from a remote server against the
    user's private key and, on success, install it as the local identity.
    Any failure fires a delayed 'restore-failed' event with a reason.
    """
    global _WorkingKey
    lg.out(4, 'identity_restorer.doVerifyAndRestore')
    remote_identity_src = arg
    # keep backup copies of the current key and identity before overwriting
    if os.path.isfile(settings.KeyFileName()):
        lg.out(4, 'identity_restorer.doVerifyAndRestore will backup and remove ' + settings.KeyFileName())
        bpio.backup_and_remove(settings.KeyFileName())
    if os.path.isfile(settings.LocalIdentityFilename()):
        lg.out(4, 'identity_restorer.doVerifyAndRestore will backup and remove ' + settings.LocalIdentityFilename())
        bpio.backup_and_remove(settings.LocalIdentityFilename())
    try:
        # two copies: remote_ident stays untouched, local_ident gets re-signed
        remote_ident = identity.identity(xmlsrc=remote_identity_src)
        local_ident = identity.identity(xmlsrc=remote_identity_src)
    except:
        # lg.exc()
        reactor.callLater(0.1, self.automat, 'restore-failed',
                          ('remote identity have incorrect format', 'red'))
        return
    lg.out(4, 'identity_restorer.doVerifyAndRestore checking remote identity')
    try:
        res = remote_ident.isCorrect()
    except:
        lg.exc()
        res = False
    if not res:
        lg.out(4, 'identity_restorer.doVerifyAndRestore remote identity is not correct FAILED!!!!')
        reactor.callLater(0.1, self.automat, 'restore-failed',
                          ('remote identity format is not correct', 'red'))
        return
    lg.out(4, 'identity_restorer.doVerifyAndRestore validate remote identity')
    try:
        res = remote_ident.Valid()
    except:
        lg.exc()
        res = False
    if not res:
        lg.out(4, 'identity_restorer.doVerifyAndRestore validate remote identity FAILED!!!!')
        reactor.callLater(0.1, self.automat, 'restore-failed',
                          ('remote identity is not valid', 'red'))
        return
    key.ForgetMyKey()
    # install the downloaded private key and try to initialize it
    bpio.WriteFile(settings.KeyFileName(), _WorkingKey)
    try:
        key.InitMyKey()
    except:
        key.ForgetMyKey()
        # lg.exc()
        try:
            os.remove(settings.KeyFileName())
        except:
            pass
        reactor.callLater(0.1, self.automat, 'restore-failed',
                          ('private key is not valid', 'red'))
        return
    try:
        local_ident.sign()
    except:
        # lg.exc()
        reactor.callLater(0.1, self.automat, 'restore-failed',
                          ('error while signing identity', 'red'))
        return
    # signing with our key must reproduce the remote signature exactly
    if remote_ident.signature != local_ident.signature:
        reactor.callLater(0.1, self.automat, 'restore-failed',
                          ('signature did not match, key verification failed!', 'red'))
        return
    my_id.setLocalIdentity(local_ident)
    my_id.saveLocalIdentity()
    bpio.WriteFile(settings.UserNameFilename(), my_id.getIDName())
    # success: the backup copies are no longer needed
    if os.path.isfile(settings.KeyFileName() + '.backup'):
        lg.out(4, 'identity_restorer.doVerifyAndRestore will remove backup file for ' + settings.KeyFileName())
        bpio.remove_backuped_file(settings.KeyFileName())
    if os.path.isfile(settings.LocalIdentityFilename() + '.backup'):
        lg.out(4, 'identity_restorer.doVerifyAndRestore will remove backup file for ' + settings.LocalIdentityFilename())
        bpio.remove_backuped_file(settings.LocalIdentityFilename())
    reactor.callLater(0.1, self.automat, 'restore-success')
def Data(request):
    """
    This is when we 1) save my requested data to restore the backup 2) or
    save the customer file on our local HDD.
    """
    if _Debug:
        lg.out(_DebugLevel, 'p2p_service.Data %d bytes in [%s] by %s | %s' % (
            len(request.Payload), request.PacketID, request.OwnerID, request.CreatorID))
    # 1. this is our Data!
    if request.OwnerID == my_id.getLocalID():
        if _Debug:
            lg.out(_DebugLevel, "p2p_service.Data %r for us from %s" % (
                request, nameurl.GetName(request.RemoteID)))
        if driver.is_on('service_backups'):
            # TODO: move this into callback
            settings.BackupIndexFileName()
            indexPacketID = global_id.MakeGlobalID(
                idurl=my_id.getLocalID(), path=settings.BackupIndexFileName())
            if request.PacketID == indexPacketID:
                from storage import backup_control
                backup_control.IncomingSupplierBackupIndex(request)
                return True
        return False
    # 2. this Data is not belong to us
    if not driver.is_on('service_supplier'):
        return SendFail(request, 'supplier service is off')
    if not contactsdb.is_customer(request.OwnerID):
        # SECURITY
        lg.warn("%s not a customer, packetID=%s" % (request.OwnerID, request.PacketID))
        SendFail(request, 'not a customer')
        return
    glob_path = global_id.ParseGlobalID(request.PacketID)
    if not glob_path['path']:
        # backward compatible check
        glob_path = global_id.ParseGlobalID(my_id.getGlobalID() + ':' + request.PacketID)
    if not glob_path['path']:
        lg.warn("got incorrect PacketID")
        SendFail(request, 'incorrect PacketID')
        return
    # TODO: process files from another customer : glob_path['idurl']
    filename = makeFilename(request.OwnerID, glob_path['path'])
    if not filename:
        lg.warn("got empty filename, bad customer or wrong packetID?")
        SendFail(request, 'empty filename')
        return
    dirname = os.path.dirname(filename)
    if not os.path.exists(dirname):
        try:
            bpio._dirs_make(dirname)
        except:
            lg.warn("ERROR can not create sub dir " + dirname)
            SendFail(request, 'write error')
            return
    data = request.Serialize()
    donated_bytes = settings.getDonatedBytes()
    # make sure the donated-space bookkeeping file exists
    if not os.path.isfile(settings.CustomersSpaceFile()):
        bpio._write_dict(settings.CustomersSpaceFile(), {'free': donated_bytes})
        if _Debug:
            lg.out(_DebugLevel, 'p2p_service.Data created a new space file')
    space_dict = bpio._read_dict(settings.CustomersSpaceFile())
    if request.OwnerID not in space_dict.keys():
        lg.warn("no info about donated space for %s" % request.OwnerID)
        SendFail(request, 'no info about donated space')
        return
    used_space_dict = bpio._read_dict(settings.CustomersUsedSpaceFile(), {})
    if request.OwnerID in used_space_dict.keys():
        try:
            # enforce this customer's donated-space quota
            bytes_used_by_customer = int(used_space_dict[request.OwnerID])
            bytes_donated_to_customer = int(space_dict[request.OwnerID])
            if bytes_donated_to_customer - bytes_used_by_customer < len(data):
                lg.warn("no free space for %s" % request.OwnerID)
                SendFail(request, 'no free space')
                return
        except:
            lg.exc()
    if not bpio.WriteFile(filename, data):
        lg.warn("ERROR can not write to " + str(filename))
        SendFail(request, 'write error')
        return
    SendAck(request, str(len(request.Payload)))
    from supplier import local_tester
    reactor.callLater(0, local_tester.TestSpaceTime)
    del data
    if _Debug:
        lg.out(_DebugLevel, "p2p_service.Data saved from [%s | %s] to %s" % (
            request.OwnerID, request.CreatorID, filename,))
def _on_data(self, newpacket):
    """
    Handle an incoming Data packet on the supplier side: verify the sender
    is a customer, enforce the donated-space quota and store the packet on
    local HDD. Returns True when the packet was accepted and saved.
    """
    import os
    from twisted.internet import reactor
    from logs import lg
    from system import bpio
    from main import settings
    from userid import my_id
    from userid import global_id
    from contacts import contactsdb
    from p2p import p2p_service
    if newpacket.OwnerID == my_id.getLocalID():
        # this Data belong to us, SKIP
        return False
    if not contactsdb.is_customer(newpacket.OwnerID):
        # SECURITY
        # TODO: process files from another customer : glob_path['idurl']
        lg.warn("skip, %s not a customer, packetID=%s" % (newpacket.OwnerID, newpacket.PacketID))
        # p2p_service.SendFail(newpacket, 'not a customer')
        return False
    glob_path = global_id.ParseGlobalID(newpacket.PacketID)
    if not glob_path['path']:
        # backward compatible check
        glob_path = global_id.ParseGlobalID(
            my_id.getGlobalID('master') + ':' + newpacket.PacketID)
    if not glob_path['path']:
        lg.err("got incorrect PacketID")
        p2p_service.SendFail(newpacket, 'incorrect path')
        return False
    filename = self._do_make_valid_filename(newpacket.OwnerID, glob_path)
    if not filename:
        lg.warn("got empty filename, bad customer or wrong packetID?")
        p2p_service.SendFail(newpacket, 'empty filename')
        return False
    dirname = os.path.dirname(filename)
    if not os.path.exists(dirname):
        try:
            bpio._dirs_make(dirname)
        except:
            lg.err("can not create sub dir %s" % dirname)
            p2p_service.SendFail(newpacket, 'write error')
            return False
    data = newpacket.Serialize()
    donated_bytes = settings.getDonatedBytes()
    # make sure the donated-space bookkeeping file exists
    if not os.path.isfile(settings.CustomersSpaceFile()):
        bpio._write_dict(settings.CustomersSpaceFile(), {
            'free': donated_bytes,
        })
        lg.warn('created a new space file: %s' % settings.CustomersSpaceFile())
    space_dict = bpio._read_dict(settings.CustomersSpaceFile())
    if newpacket.OwnerID not in space_dict.keys():
        lg.err("no info about donated space for %s" % newpacket.OwnerID)
        p2p_service.SendFail(newpacket, 'no info about donated space')
        return False
    used_space_dict = bpio._read_dict(settings.CustomersUsedSpaceFile(), {})
    if newpacket.OwnerID in used_space_dict.keys():
        try:
            # enforce this customer's donated-space quota
            bytes_used_by_customer = int(used_space_dict[newpacket.OwnerID])
            bytes_donated_to_customer = int(space_dict[newpacket.OwnerID])
            if bytes_donated_to_customer - bytes_used_by_customer < len(data):
                lg.warn("no free space for %s" % newpacket.OwnerID)
                p2p_service.SendFail(newpacket, 'no free space')
                return False
        except:
            lg.exc()
    if not bpio.WriteFile(filename, data):
        lg.err("can not write to %s" % str(filename))
        p2p_service.SendFail(newpacket, 'write error')
        return False
    # Here Data() packet was stored as it is on supplier node (current machine)
    sz = len(data)
    del data
    lg.out(self.debug_level,
           "service_supplier._on_data %r saved from [%s | %s] to %s with %d bytes" % (
               newpacket, newpacket.OwnerID, newpacket.CreatorID, filename, sz,))
    p2p_service.SendAck(newpacket, str(len(newpacket.Payload)))
    from supplier import local_tester
    reactor.callLater(0, local_tester.TestSpaceTime)
    if self.publish_event_supplier_file_modified:
        from main import events
        events.send('supplier-file-modified', data=dict(
            action='write',
            glob_path=glob_path['path'],
            owner_id=newpacket.OwnerID,
        ))
    return True
def doSaveName(self, arg):
    """
    Action method.

    Remember the chosen private key size and persist the user name.
    """
    key_size = arg['pksize']
    username = arg['username']
    settings.setPrivateKeySize(key_size)
    bpio.WriteFile(settings.UserNameFilename(), username)