def _recover():
    from system import bpio
    from lib import nameurl
    if len(args) < 3:
        return 2
    src = bpio.ReadBinaryFile(args[2])
    if len(src) > 1024 * 10:
        print_text('file is too big for private key')
        return 0
    try:
        lines = src.split('\n')
        idurl = lines[0]
        txt = '\n'.join(lines[1:])
        if idurl != nameurl.FilenameUrl(nameurl.UrlFilename(idurl)):
            idurl = ''
            txt = src
    except:
        idurl = ''
        txt = src
    if idurl == '' and len(args) > 3:
        idurl = args[3]
    if idurl == '':
        print_text('BitDust needs to know your IDURL to recover your account\n')
        return 2
    from automats import automat
    from main import initializer
    initializer.A('run-cmd-line-recover', {'idurl': idurl, 'keysrc': txt})
    reactor.run()
    automat.objects().clear()
    if my_id.isLocalIdentityReady():
        print_text('your identity was restored:')
        print_text(my_id.getLocalIdentity().serialize())
    else:
        print_text('identity recovery FAILED')
    return 0
def cmd_key(opts, args, overDict, running, executablePath):
    from main import settings
    from lib import misc
    from system import bpio
    from userid import my_id
    from crypt import key
    settings.init()
    my_id.init()
    if not key.LoadMyKey():
        print_text('private key does not exist or is not valid\n')
        return 0
    if not my_id.isLocalIdentityReady():
        print_text('local identity does not exist, your key is worth nothing\n')
        return 0
    if len(args) == 2:
        if args[1] == 'copy':
            TextToSave = my_id.getLocalID() + "\n" + key.MyPrivateKey()
            misc.setClipboardText(TextToSave)
            del TextToSave
            print_text('now you can "paste" your private key with Ctrl+V where you want')
            print_text('WARNING! keep your key in a safe place, do not publish it!\n')
            return 0
        elif args[1] == 'print':
            TextToSave = my_id.getLocalID() + "\n" + key.MyPrivateKey()
            print_text('\n' + TextToSave + '\n')
            del TextToSave
            print_text('WARNING! keep your key in a safe place, do not publish it!\n')
            return 0
    elif len(args) == 3:
        if args[1] == 'copy' or args[1] == 'save' or args[1] == 'backup':
            from system import bpio
            curpath = os.getcwd()
            os.chdir(executablePath)
            filenameto = bpio.portablePath(args[2])
            os.chdir(curpath)
            TextToSave = my_id.getLocalID() + "\n" + key.MyPrivateKey()
            if not bpio.AtomicWriteFile(filenameto, TextToSave):
                del TextToSave
                print_text('error writing to %s\n' % filenameto)
                return 1
            del TextToSave
            print_text('your private key was copied to file %s' % filenameto)
            print_text('WARNING! keep your key in a safe place, do not publish it!\n')
            return 0
    return 2
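# A minimal sketch (hypothetical helper, not part of the module above) of the backup
# file layout produced by cmd_key(): the first line is the IDURL, everything after it
# is the private key text - exactly what _recover() expects to read back.
def split_key_backup(src):
    # src is the text previously written by "key backup <filename>" or copied to clipboard
    lines = src.split('\n')
    idurl = lines[0]
    key_src = '\n'.join(lines[1:])
    return idurl, key_src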
def main():
    """
    This should print the current identity or create a new one.
    """
    from userid import my_id
    my_id.loadLocalIdentity()
    if my_id.isLocalIdentityReady():
        my_id.getLocalIdentity().sign()
        print(my_id.getLocalIdentity().serialize())
        print('Valid is: ', my_id.getLocalIdentity().Valid())
    else:
        my_id.setLocalIdentity(my_id.buildDefaultIdentity(sys.argv[1]))
        my_id.saveLocalIdentity()
        print(my_id.getLocalIdentity().serialize())
        print('Valid is: ', my_id.getLocalIdentity().Valid())
        my_id._LocalIdentity = None
        my_id.loadLocalIdentity()
def doSendMyIdentity(self, arg):
    """
    Action method.
    """
    # TODO: just to debug - skip sending to ID servers and go further
    # self.state = 'REQUEST_ID'
    # self.event('my-id-exist', self.new_identity.serialize())
    # return
    mycurrentidentity = None
    if my_id.isLocalIdentityReady():
        mycurrentidentity = my_id.getLocalIdentity()
    my_id.setLocalIdentity(self.new_identity)

    def _cb(x):
        my_id.setLocalIdentity(mycurrentidentity)
        self.automat('my-id-sent')

    def _eb(x):
        my_id.setLocalIdentity(mycurrentidentity)
        self.automat('my-id-failed')

    dl = self._send_new_identity()
    dl.addCallback(_cb)
    dl.addErrback(_eb)
def _register():
    if len(args) <= 2:
        return 2
    pksize = settings.getPrivateKeySize()
    if len(args) > 3:
        try:
            pksize = int(args[3])
        except:
            print_text('incorrect private key size\n')
            return 0
    from automats import automat
    from main import initializer
    from lib import misc
    if not misc.ValidUserName(args[2]):
        return 0
    initializer.A('run-cmd-line-register', {'username': args[2], 'pksize': pksize})
    reactor.run()
    automat.objects().clear()
    if my_id.isLocalIdentityReady():
        print_text('new identity created:')
        print_text(my_id.getLocalIdentity().serialize())
    else:
        print_text('identity creation FAILED')
    return 0
def doSendMyIdentity(self, *args, **kwargs):
    """
    Action method.
    """
    # TODO: just to debug - skip sending to ID servers and go further
    # self.state = 'REQUEST_ID'
    # self.event('my-id-exist', self.new_identity.serialize())
    # return
    mycurrentidentity = None
    if my_id.isLocalIdentityReady():
        mycurrentidentity = my_id.getLocalIdentity()
    my_id.setLocalIdentity(self.new_identity)

    def _cb(x):
        my_id.setLocalIdentity(mycurrentidentity)
        self.automat('my-id-sent')

    def _eb(x):
        my_id.setLocalIdentity(mycurrentidentity)
        self.automat('my-id-failed')

    dl = self._send_new_identity()
    dl.addCallback(_cb)
    dl.addErrback(_eb)
def identity_cached(new_id_obj):
    """
    After receiving the identity file of another user we need to check his identity sources.
    It can be a file from an identity server or an Identity() packet received directly from a remote peer.
    It can also be my own identity that was changed locally.
    In any case we need to take certain actions if those identity sources changed.

    The first identity source forms the IDURL of that identity and acts as the unique global ID
    of that BitDust node. When the first identity source changes (because an identity server went down)
    the identity is "rotated": the second identity source is placed in the first position and the IDURL changes.
    In that case we need to remember the new IDURL and keep track of the old IDURL of that user -
    this way we can match and merge different IDURLs for one owner.
    """
    global _IdentityHistoryDir
    global _KnownUsers
    global _KnownIDURLs
    global _MergedIDURLs
    from userid import identity
    pub_key = new_id_obj.getPublicKey()
    user_name = new_id_obj.getIDName()
    if _Debug:
        lg.args(_DebugLevel, user_name=user_name)
    is_identity_rotated = False
    latest_id_obj = None
    latest_sources = []
    if pub_key not in _KnownUsers:
        user_path = tempfile.mkdtemp(prefix=user_name + '@', dir=_IdentityHistoryDir)
        _KnownUsers[pub_key] = user_path
        first_identity_file_path = os.path.join(user_path, '0')
        local_fs.WriteBinaryFile(first_identity_file_path, new_id_obj.serialize())
        if _Debug:
            lg.out(_DebugLevel, 'id_url.identity_cached wrote first item for user %r in identity history: %r' % (
                user_name, first_identity_file_path))
    else:
        user_path = _KnownUsers[pub_key]
        user_identity_files = sorted(map(int, os.listdir(user_path)))
        if len(user_identity_files) == 0:
            raise Exception('identity history for user %r is broken, public key is known, but no identity files found' % user_name)
        latest_identity_file_path = ''
        latest_pub_key = None
        latest_revision = -1
        known_revisions = set()
        for id_file in user_identity_files:
            identity_file_path = os.path.join(user_path, strng.to_text(id_file))
            xmlsrc = local_fs.ReadBinaryFile(identity_file_path)
            one_id_obj = identity.identity(xmlsrc=xmlsrc)
            if not one_id_obj.isCorrect():
                lg.err('identity history for user %r is broken, identity in the file %r is not correct' % (user_name, identity_file_path))
                continue
            if not one_id_obj.Valid():
                lg.err('identity history for user %r is broken, identity in the file %r is not valid' % (user_name, identity_file_path))
                continue
            if not latest_pub_key:
                latest_pub_key = one_id_obj.getPublicKey()
            if latest_pub_key != one_id_obj.getPublicKey():
                lg.err('identity history for user %r is broken, public key not matching in the file %r' % (user_name, identity_file_path))
                continue
            known_revisions.add(one_id_obj.getRevisionValue())
            if one_id_obj.getRevisionValue() > latest_revision:
                latest_revision = one_id_obj.getRevisionValue()
                latest_identity_file_path = identity_file_path
        xmlsrc = local_fs.ReadBinaryFile(latest_identity_file_path)
        latest_id_obj = identity.identity(xmlsrc=xmlsrc)
        if latest_id_obj.getPublicKey() != new_id_obj.getPublicKey():
            raise Exception('identity history for user %r is broken, public key not matching' % user_name)
        if latest_id_obj.getIDName() != new_id_obj.getIDName():
            lg.warn('found another user name in identity history for user %r : %r' % (user_name, latest_id_obj.getIDName()))
        if new_id_obj.getRevisionValue() in known_revisions:
            if _Debug:
                lg.out(_DebugLevel, 'id_url.identity_cached revision %d already known for user %r' % (new_id_obj.getRevisionValue(), user_name))
        else:
            latest_sources = latest_id_obj.getSources(as_originals=True)
            new_sources = new_id_obj.getSources(as_originals=True)
            if latest_sources == new_sources:
                local_fs.WriteBinaryFile(latest_identity_file_path, new_id_obj.serialize())
                if _Debug:
                    lg.out(_DebugLevel, 'id_url.identity_cached latest identity sources for user %r did not change, updated file %r' % (
                        user_name, latest_identity_file_path))
            else:
                next_identity_file = user_identity_files[-1] + 1
                next_identity_file_path = os.path.join(user_path, strng.to_text(next_identity_file))
                local_fs.WriteBinaryFile(next_identity_file_path, new_id_obj.serialize())
                is_identity_rotated = True
                if _Debug:
                    lg.out(_DebugLevel, 'id_url.identity_cached identity sources for user %r changed, wrote new item in the history: %r' % (
                        user_name, next_identity_file_path))
    new_revision = new_id_obj.getRevisionValue()
    new_sources = new_id_obj.getSources(as_originals=True)
    for new_idurl in reversed(new_sources):
        if new_idurl not in _KnownIDURLs:
            _KnownIDURLs[new_idurl] = new_id_obj.getPublicKey()
            if _Debug:
                lg.out(_DebugLevel, 'id_url.identity_cached new IDURL added: %r' % new_idurl)
        else:
            if _KnownIDURLs[new_idurl] != new_id_obj.getPublicKey():
                lg.warn('another user had same identity source: %r' % new_idurl)
                _KnownIDURLs[new_idurl] = new_id_obj.getPublicKey()
        if pub_key not in _MergedIDURLs:
            _MergedIDURLs[pub_key] = {}
            if _Debug:
                lg.out(_DebugLevel, 'id_url.identity_cached new Public Key added: %s...' % pub_key[-10:])
        prev_idurl = _MergedIDURLs[pub_key].get(new_revision, None)
        if new_revision in _MergedIDURLs[pub_key]:
            if _MergedIDURLs[pub_key][new_revision] != new_idurl:
                if nameurl.GetName(_MergedIDURLs[pub_key][new_revision]) == nameurl.GetName(new_idurl):
                    if _MergedIDURLs[pub_key][new_revision] not in new_sources:
                        lg.warn('rewriting existing identity revision %d : %r -> %r' % (
                            new_revision, _MergedIDURLs[pub_key][new_revision], new_idurl))
                    _MergedIDURLs[pub_key][new_revision] = new_idurl
        else:
            _MergedIDURLs[pub_key][new_revision] = new_idurl
            if _Debug:
                lg.out(_DebugLevel, 'id_url.identity_cached added new revision %d for user %r, total revisions %d: %r -> %r' % (
                    new_revision, user_name, len(_MergedIDURLs[pub_key]), prev_idurl, new_idurl))
    if _Debug:
        lg.args(_DebugLevel, is_identity_rotated=is_identity_rotated, latest_id_obj=bool(latest_id_obj))
    if is_identity_rotated and latest_id_obj is not None:
        latest_revision = latest_id_obj.getRevisionValue()
        if _Debug:
            lg.args(_DebugLevel, new_revision=new_revision, latest_revision=latest_revision)
        if new_revision > latest_revision:
            lg.info('found rotated identity after caching %r -> %r' % (
                latest_id_obj.getSources(as_originals=True)[0], new_sources[0]))
            from main import events
            events.send('identity-rotated', data=dict(
                old_idurls=latest_id_obj.getSources(as_originals=True),
                new_idurls=new_id_obj.getSources(as_originals=True),
                old_revision=latest_id_obj.getRevisionValue(),
                new_revision=new_revision,
            ))
            if latest_id_obj.getIDURL(as_original=True) != new_id_obj.getIDURL(as_original=True):
                events.send('identity-url-changed', data=dict(
                    old_idurl=latest_id_obj.getIDURL(as_original=True),
                    new_idurl=new_id_obj.getIDURL(as_original=True),
                    old_revision=latest_id_obj.getRevisionValue(),
                    new_revision=new_revision,
                ))
            from userid import my_id
            if my_id.isLocalIdentityReady():
                if my_id.getLocalID() == new_id_obj.getIDURL():
                    events.send('my-identity-rotated', data=dict(
                        old_idurls=latest_id_obj.getSources(as_originals=True),
                        new_idurls=new_id_obj.getSources(as_originals=True),
                        old_revision=latest_id_obj.getRevisionValue(),
                        new_revision=new_revision,
                    ))
                    if latest_id_obj.getIDURL(as_original=True) != new_id_obj.getIDURL(as_original=True):
                        events.send('my-identity-url-changed', data=dict(
                            old_idurl=latest_id_obj.getIDURL(as_original=True),
                            new_idurl=new_id_obj.getIDURL(as_original=True),
                            old_revision=latest_id_obj.getRevisionValue(),
                            new_revision=new_revision,
                        ))
        else:
            lg.warn('cached out-dated revision %d for %r' % (new_revision, new_sources[0]))
    else:
        if _Debug:
            lg.out(_DebugLevel, 'id_url.identity_cached revision %d for %r' % (new_revision, new_sources[0]))
    return True
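# A minimal consumer sketch for the notifications emitted above. It assumes main.events
# exposes an add_subscriber(callback, event_id) helper and that subscribers receive an
# event object carrying the payload passed to events.send() - both are assumptions here,
# not confirmed by the code above.
from main import events

def _on_identity_rotated(evt):
    # evt.data is expected to hold the dict built in identity_cached()
    print('identity rotated: %r -> %r' % (evt.data['old_idurls'], evt.data['new_idurls']))

events.add_subscriber(_on_identity_rotated, 'identity-rotated')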
def cmd_identity(opts, args, overDict, running):
    from userid import my_id
    from main import settings
    settings.init()
    my_id.init()

    if args[0] == 'idurl':
        if my_id.isLocalIdentityReady():
            print_text(my_id.getLocalID())
        else:
            print_text('local identity does not exist')
        return 0

    if len(args) == 1 or args[1].lower() in ['info', '?', 'show', 'print']:
        if my_id.isLocalIdentityReady():
            print_text(my_id.getLocalIdentity().serialize())
        else:
            print_text('local identity does not exist')
        return 0

    def _register():
        if len(args) <= 2:
            return 2
        pksize = settings.getPrivateKeySize()
        if len(args) > 3:
            try:
                pksize = int(args[3])
            except:
                print_text('incorrect private key size\n')
                return 0
        from automats import automat
        from main import initializer
        from lib import misc
        if not misc.ValidUserName(args[2]):
            return 0
        initializer.A('run-cmd-line-register', {'username': args[2], 'pksize': pksize})
        reactor.run()
        automat.objects().clear()
        if my_id.isLocalIdentityReady():
            print_text('new identity created:')
            print_text(my_id.getLocalIdentity().serialize())
        else:
            print_text('identity creation FAILED')
        return 0

    def _recover():
        from system import bpio
        from lib import nameurl
        if len(args) < 3:
            return 2
        src = bpio.ReadBinaryFile(args[2])
        if len(src) > 1024 * 10:
            print_text('file is too big for private key')
            return 0
        try:
            lines = src.split('\n')
            idurl = lines[0]
            txt = '\n'.join(lines[1:])
            if idurl != nameurl.FilenameUrl(nameurl.UrlFilename(idurl)):
                idurl = ''
                txt = src
        except:
            idurl = ''
            txt = src
        if idurl == '' and len(args) > 3:
            idurl = args[3]
        if idurl == '':
            print_text('BitDust needs to know your IDURL to recover your account\n')
            return 2
        from automats import automat
        from main import initializer
        initializer.A('run-cmd-line-recover', {'idurl': idurl, 'keysrc': txt})
        reactor.run()
        automat.objects().clear()
        if my_id.isLocalIdentityReady():
            print_text('your identity was restored:')
            print_text(my_id.getLocalIdentity().serialize())
        else:
            print_text('identity recovery FAILED')
        return 0

    if args[1].lower() in ['create', 'new', 'register', 'generate', ]:
        if my_id.isLocalIdentityReady():
            print_text('local identity [%s] already exists\n' % my_id.getIDName())
            return 1
        if running:
            print_text('BitDust is running at the moment, you need to stop the software first\n')
            return 0
        return _register()

    if args[1].lower() in ['restore', 'recover', 'read', 'load', ]:
        if running:
            print_text('BitDust is running at the moment, you need to stop the software first\n')
            return 0
        return _recover()

    if args[1].lower() in ['delete', 'remove', 'erase', 'del', 'rm', 'kill']:
        if running:
            print_text('BitDust is running at the moment, you need to stop the software first\n')
            return 0
        oldname = my_id.getIDName()
        my_id.forgetLocalIdentity()
        my_id.eraseLocalIdentity()
        print_text('local identity [%s] no longer exists\n' % oldname)
        return 0

    return 2
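# Illustrative command-line usage dispatched into cmd_identity() above. The "bitdust"
# entry point name is an assumption here; the subcommand words and positional arguments
# come straight from the args[] checks in the code.
#
#     bitdust identity show                      # print the local identity XML
#     bitdust identity create <username> [size]  # generate a new identity and private key
#     bitdust identity restore <file> [idurl]    # recover identity from a key backup file
#     bitdust identity delete                    # erase the local identity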
def installed(self):
    from userid import my_id
    if not my_id.isLocalIdentityReady():
        return False
    return True
def is_identity_authenticated():
    ok = my_id.isLocalIdentityReady() and key.isMyKeyReady()
    # lg.out(8, 'django.is_identity_authenticated node=%s' % (ok))
    return ok
def _on_check_network_connect(self):
    from p2p import network_service
    from userid import my_id
    if my_id.isLocalIdentityReady():
        network_service.connected(wait_timeout=0.1)
    return None
def connected(wait_timeout=5):
    ret = Deferred()
    if driver.is_enabled('service_proxy_transport'):
        p2p_connector_lookup = automat.find('p2p_connector')
        if p2p_connector_lookup:
            p2p_connector_machine = automat.by_index(p2p_connector_lookup[0])
            if p2p_connector_machine and p2p_connector_machine.state == 'CONNECTED':
                proxy_receiver_lookup = automat.find('proxy_receiver')
                if proxy_receiver_lookup:
                    proxy_receiver_machine = automat.by_index(proxy_receiver_lookup[0])
                    if proxy_receiver_machine and proxy_receiver_machine.state == 'LISTEN':
                        # service_proxy_transport() is enabled, proxy_receiver() is listening: all good
                        wait_timeout_defer = Deferred()
                        wait_timeout_defer.addBoth(lambda _: ret.callback({
                            'service_network': 'started',
                            'service_gateway': 'started',
                            'service_p2p_hookups': 'started',
                            'service_proxy_transport': 'started',
                            'proxy_receiver_state': proxy_receiver_machine.state,
                        }))
                        if not wait_timeout:
                            wait_timeout = 0.01
                        wait_timeout_defer.addTimeout(wait_timeout, clock=reactor)
                        return ret
                else:
                    d = driver.is_healthy('service_proxy_transport')
                    d.addCallback(on_service_proxy_transport_check_healthy, wait_timeout=wait_timeout)
                    d.addErrback(lg.errback, debug=_Debug, debug_level=_DebugLevel, method='network_service.connected', ignore=True)
                    lg.warn('disconnected, reason is proxy_receiver() not started yet')
                    ret.callback(dict(
                        error='disconnected',
                        reason='proxy_receiver_not_started',
                    ))
                    return ret
    if not my_id.isLocalIdentityReady():
        lg.warn('local identity is not valid or does not exist')
        ret.callback(dict(
            error='local identity is not valid or does not exist',
            reason='identity_not_exist',
        ))
        return ret
    if not driver.is_enabled('service_network'):
        lg.warn('service_network() is disabled')
        ret.callback(dict(
            error='service_network() is disabled',
            reason='service_network_disabled',
        ))
        return ret
    if not driver.is_enabled('service_gateway'):
        lg.warn('service_gateway() is disabled')
        ret.callback(dict(
            error='service_gateway() is disabled',
            reason='service_gateway_disabled',
        ))
        return ret
    if not driver.is_enabled('service_p2p_hookups'):
        lg.warn('service_p2p_hookups() is disabled')
        ret.callback(dict(
            error='service_p2p_hookups() is disabled',
            reason='service_p2p_hookups_disabled',
        ))
        return ret
    do_service_test('service_network', ret, wait_timeout)
    return ret
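# A minimal caller sketch (illustrative only, assuming the Twisted reactor is already
# running): connected() returns a Deferred that fires with a status dict, so a caller
# only needs to inspect the "error" key to decide whether the network layer is up.
def _check_network():
    d = connected(wait_timeout=5)

    def _report(result):
        if result.get('error'):
            print('disconnected: %s' % result.get('reason'))
        else:
            print('connected, proxy_receiver is %s' % result.get('proxy_receiver_state'))
        return result

    d.addCallback(_report)
    return d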
def Identity(newpacket, send_ack=True):
    """
    A normal node or an identity server is sending us a new copy of an identity
    for one of our contacts.

    Checks that the identity is signed correctly. Sends requests to cache all
    sources (other identity servers) holding that identity.
    """
    # TODO: move to service_gateway
    newxml = newpacket.Payload
    newidentity = identity.identity(xmlsrc=newxml)
    # SECURITY
    # check that identity is signed correctly
    # old public key matches new one
    # this is done in `UpdateAfterChecking()`
    idurl = newidentity.getIDURL()
    if not identitycache.HasKey(idurl):
        lg.info('received new identity %s rev %r' % (idurl, newidentity.getRevisionValue()))
    if not identitycache.UpdateAfterChecking(idurl, newxml):
        lg.warn("ERROR has non-Valid identity")
        return False
    if my_id.isLocalIdentityReady():
        if newidentity.getPublicKey() == my_id.getLocalIdentity().getPublicKey():
            if newidentity.getRevisionValue() > my_id.getLocalIdentity().getRevisionValue():
                lg.warn('received my own identity from another user, but with higher revision number')
                reactor.callLater(0, my_id.rebuildLocalIdentity, new_revision=newidentity.getRevisionValue() + 1)  # @UndefinedVariable
                return False
    latest_identity = id_url.get_latest_ident(newidentity.getPublicKey())
    if latest_identity:
        if latest_identity.getRevisionValue() > newidentity.getRevisionValue():
            # check if received identity is the most recent revision number we ever saw for that remote user
            # in case we saw the same identity with a higher revision number we need to reply and notify the user
            # this may happen after identity restore - the user starts counting revision numbers from 0
            # but other nodes already store previous copies, the user just needs to jump to the most recent revision number
            lg.warn('received new identity with out-dated revision number from %r' % idurl)
            ident_packet = signed.Packet(
                Command=commands.Identity(),
                OwnerID=latest_identity.getIDURL(),
                CreatorID=latest_identity.getIDURL(),
                PacketID='identity:%s' % packetid.UniqueID(),
                Payload=latest_identity.serialize(),
                RemoteID=idurl,
            )
            reactor.callLater(0, packet_out.create, outpacket=ident_packet, wide=True, callbacks={}, keep_alive=False)  # @UndefinedVariable
            return False
    # Now that we have the ID we can check the packet
    if not newpacket.Valid():
        # If not valid do nothing
        lg.warn("not Valid packet from %s" % idurl)
        return False
    if not send_ack:
        if _Debug:
            lg.out(_DebugLevel, "p2p_service.Identity %s idurl=%s remoteID=%r skip sending Ack()" % (
                newpacket.PacketID, idurl, newpacket.RemoteID))
        return True
    if newpacket.OwnerID == idurl:
        if _Debug:
            lg.out(_DebugLevel, "p2p_service.Identity %s idurl=%s remoteID=%r sending wide Ack()" % (
                newpacket.PacketID, idurl, newpacket.RemoteID))
    else:
        if _Debug:
            lg.out(_DebugLevel, "p2p_service.Identity %s idurl=%s remoteID=%r but packet ownerID=%s sending wide Ack()" % (
                newpacket.PacketID, idurl, newpacket.RemoteID, newpacket.OwnerID,
            ))
    # wide=True : a small trick to respond to all his contacts
    reactor.callLater(0, SendAck, newpacket, wide=True)  # @UndefinedVariable
    return True
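# A sketch (not part of p2p_service itself) of how a node could push its own latest
# identity to a peer, reusing the same signed.Packet / packet_out.create pattern as the
# out-dated-revision reply above. The helper name and the remote_idurl argument are
# assumptions for illustration only.
def send_my_identity_to(remote_idurl):
    my_identity = my_id.getLocalIdentity()
    ident_packet = signed.Packet(
        Command=commands.Identity(),
        OwnerID=my_identity.getIDURL(),
        CreatorID=my_identity.getIDURL(),
        PacketID='identity:%s' % packetid.UniqueID(),
        Payload=my_identity.serialize(),
        RemoteID=remote_idurl,
    )
    # wide=True mirrors the trick above: deliver the packet towards all known contacts of that node
    reactor.callLater(0, packet_out.create, outpacket=ident_packet, wide=True, callbacks={}, keep_alive=False)
    return ident_packet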