class IPService(metaclass=IPServiceRegistry):
    """
    InterPlanetary Service (attached to an IPID)

    Wraps one entry of a DID document's 'service' array and exposes
    its fields (id, type, serviceEndpoint, ...) as properties.
    """

    # Service type constants (values of the 'type' field in the DID doc)
    SRV_TYPE_DWEBBLOG = 'DwebBlogService'
    SRV_TYPE_GALLERY = 'DwebGalleryService'
    SRV_TYPE_ATOMFEED = 'DwebAtomFeedService'
    SRV_TYPE_AVATAR = 'DwebAvatarService'

    SRV_TYPE_VC = 'VerifiableCredentialService'

    SRV_TYPE_GENERICPYRAMID = 'GalacteekPyramidService'
    SRV_TYPE_CHAT = 'GalacteekChatService'

    SRV_TYPE_LIVEPEER_STREAMING = 'P2PLivePeerStreamingService'

    SRV_TYPE_COLLECTION = 'ObjectsCollectionService'

    # NOTE(review): IPIdentifier.addServiceRendezVous and
    # addServiceVideoCall reference SRV_TYPE_PSRENDEZVOUS and
    # SRV_TYPE_VIDEOCALL, which were previously undefined on this class
    # (AttributeError at runtime). Defined here so those code paths can
    # work — TODO: confirm the canonical type strings.
    SRV_TYPE_PSRENDEZVOUS = 'PSRendezVousService'
    SRV_TYPE_VIDEOCALL = 'DwebVideoCallService'

    # Service types handled by a subclass (used by the registry to
    # instantiate the right class in IPIdentifier._serviceInst)
    forTypes = []
    container = False

    # Human-readable labels for __str__
    _srvTypeLabels = {
        SRV_TYPE_DWEBBLOG: 'User blog',
        SRV_TYPE_GALLERY: 'Image gallery',
        SRV_TYPE_GENERICPYRAMID: 'Generic pyramid',
        SRV_TYPE_ATOMFEED: 'Atom feed',
        SRV_TYPE_AVATAR: 'User avatar'
    }

    def __init__(self, dagNode: dict, ipid):
        """
        :param dict dagNode: the 'service' node from the DID document
        :param ipid: the IPIdentifier this service is attached to
        """
        self._srv = dagNode
        self._didEx = didExplode(self.id)
        self._ipid = ipid

        self.sChanged = AsyncSignal()
        self.sChanged.connectTo(self.onChanged)

    @property
    def ipid(self):
        # The owning IPIdentifier
        return self._ipid

    @property
    def id(self):
        # The DID URL identifying this service
        return self._srv['id']

    @property
    def description(self):
        return self._srv.get('description', 'No description')

    @property
    def pubdate(self):
        return self._srv.get('datepublished')

    @property
    def srvIpId(self):
        # Method-specific id part of the service's DID URL
        return self._didEx['id'] if self._didEx else None

    @property
    def srvFragment(self):
        return self._didEx['fragment'] if self._didEx else None

    @property
    def srvPath(self):
        return self._didEx['path'] if self._didEx else None

    @property
    def type(self):
        return self._srv['type']

    @property
    def endpoint(self):
        return self._srv['serviceEndpoint']

    async def expandEndpoint(self):
        """
        Return the first expanded serviceEndpoint node of this service
        (JSON-LD expanded form), or None if expansion failed or the
        endpoint is absent.
        """
        exSrv = await self.ipid.expandService(self.id)
        if not exSrv:
            return None

        try:
            endpoints = exSrv.get('https://w3id.org/did#serviceEndpoint')
            # .get() may return None or an empty list: treat both as absent
            return endpoints[0] if endpoints else None
        except (TypeError, KeyError, IndexError):
            return None

    async def onChanged(self):
        # Hook for subclasses; connected to sChanged
        pass

    def __repr__(self):
        return self.id

    def __str__(self):
        srvStr = self._srvTypeLabels.get(self.type, 'Unknown service')
        return 'IPS: {srv}'.format(srv=srvStr)
class IPIdentifier(DAGOperations):
    """
    InterPlanetary IDentifier (decentralized identity)

    This tries to follow the IPID spec as much as possible.

    The DID document is stored as an IPFS DAG node; its CID is published
    to the IPNS key that backs the did:ipid identifier.
    """

    def __init__(self, did, localId=False, ldCache=None):
        """
        :param str did: the DID (did:ipid:...)
        :param bool localId: True if we own this identity's keys
        :param ldCache: optional LRU cache for JSON-LD expansions
        """
        self._did = did
        self._p2pServices = weakref.WeakValueDictionary()
        self._document = {}
        self._docCid = None
        self._localId = localId
        self._lastResolve = None
        self._latestModified = None
        self._unlocked = False
        self.rsaAgent = None

        # JSON-LD expanded cache
        self.cache = ldCache if ldCache else LRUCache(4)

        # Async sigs
        self.sChanged = AsyncSignal(str)
        self.sServicesChanged = AsyncSignal()
        self.sServiceAvailable = AsyncSignal(IPIdentifier, IPService)
        self.sChanged.connectTo(self.onDidChanged)

    @property
    def local(self):
        return self._localId is True

    @property
    def unlocked(self):
        return self._unlocked

    @property
    def dagCid(self):
        return self.docCid

    @property
    def dagIpfsPath(self):
        return IPFSPath(self.dagCid)

    @property
    def docCid(self):
        # CID of the current DID document
        return self._docCid

    @docCid.setter
    def docCid(self, cid):
        self.message('Updating DIDDoc CID from {prev} to {n}'.format(
            prev=self._docCid, n=cid))

        # Only accept syntactically valid CIDs
        if cidValid(cid):
            self._docCid = cid

    @property
    def doc(self):
        return self._document

    @property
    def did(self):
        return self._did

    @property
    def p2pServices(self):
        return self._p2pServices

    @property
    def id(self):
        return self.ipnsKey

    @property
    def ipnsKey(self):
        # The IPNS key is the method-specific id of a did:ipid DID
        exploded = didExplode(self.did)
        if exploded and exploded['method'] == 'ipid':
            return exploded['id']

    @property
    def latestModified(self):
        return self._latestModified

    def contextUrl(self, path, fragment=None):
        """
        Build an ipschema:// URL referencing a JSON-LD context
        """
        return str(
            URL.build(
                host='galacteek.ld',
                scheme='ipschema',
                path='/' + path,
                fragment=fragment
            )
        )

    def message(self, msg, level='debug'):
        # Log with the DID and locality (local/net) as context
        getattr(log, level)('IPID ({loc}) ({did}): {msg}'.format(
            did=self.did, msg=msg, loc='local' if self._localId else 'net'))

    def didFragment(self, frag):
        return self.didUrl(fragment=frag)

    def didUrl(self, params=None, path=None, query=None, fragment=None):
        """
        Build a DID URL with the given params

        :param dict params: matrix parameters (k=v, ';'-separated)
        :param str path: DID path ('/' is prepended if missing)
        :param dict query: query string parameters
        :param str fragment: fragment ('#' is prepended if missing)
        """
        url = '{did}'.format(did=self.did)

        if isinstance(params, dict):
            p = ['{0}={1}'.format(k, str(v)) for k, v in params.items()]
            url += ';' + ';'.join(p)

        if isinstance(path, str):
            if not path.startswith('/'):
                url += f'/{path}'
            else:
                url += path

        if isinstance(query, dict):
            url += '?' + urlencode(query)

        if isinstance(fragment, str):
            if not fragment.startswith('#'):
                url += '#' + fragment
            else:
                url += fragment

        return url

    def dump(self):
        """
        Dump the DID document to stdout
        """
        print(json.dumps(self.doc, indent=4))

    async def onDidChanged(self, cid):
        # The document changed: invalidate the JSON-LD expansion cache
        cacheKey = repr(self)

        if cacheKey in self.cache:
            # Reset expanded cache
            self.message('LRU cache reset')
            del self.cache[cacheKey]

    @ipfsOp
    async def unlock(self, ipfsop, rsaPassphrase=None):
        """
        Try to unlock the RSA private key for this (local) identity.

        :rtype: bool
        """
        rsaAgent = await self.rsaAgentGet(ipfsop)
        if not rsaAgent:
            raise Exception('Agent')

        if await rsaAgent.privKeyUnlock(passphrase=rsaPassphrase):
            self._unlocked = True
        else:
            self._unlocked = False

        return self.unlocked

    async def rsaAgentGet(self, ipfsop):
        """
        Return (lazily creating) the RSA agent for this identity.
        Only local identities (which hold the private key) get an agent.
        """
        curProfile = ipfsop.ctx.currentProfile

        if self.rsaAgent:
            return self.rsaAgent

        if self.local:
            privKeyPath = curProfile._didKeyStore._privateKeyPathForDid(
                self.did)

            if not privKeyPath:
                raise Exception('Cannot find private DID key')

            pubKeyPem = await self.pubKeyPemGet(idx=0)

            self.rsaAgent = IpfsRSAAgent(
                ipfsop.ctx.rsaExec, pubKeyPem, privKeyPath)
            return self.rsaAgent

    @ipfsOp
    async def pssSign64(self, ipfsop, message: bytes):
        """
        RSA-PSS sign the message, base64-encoded (None if no agent)
        """
        agent = await self.rsaAgentGet(ipfsop)
        if agent:
            return await agent.pssSign64(message)

    @ipfsOp
    async def pssVerif(self, ipfsop, message: bytes, signature: bytes):
        """
        Verify an RSA-PSS signature against this DID's first public key
        """
        pubKeyPem = await self.pubKeyPemGet(idx=0)
        if pubKeyPem:
            return await ipfsop.ctx.rsaExec.pssVerif(
                message, signature, pubKeyPem)

    @ipfsOp
    async def inline(self, ipfsop, path=''):
        # In-line the JSON-LD contexts in the DAG for JSON-LD usage
        return await ipfsop.ldInline(await self.get(path=path))

    @ipfsOp
    async def compact(self, ipfsop):
        """
        Return the JSON-LD compacted form of the DID document
        """
        async with ipfsop.ldOps() as ld:
            return await ld.dagCompact(self.dagIpfsPath)

    @amlrucache
    async def expand(self):
        # Cached wrapper around _expand()
        return await self._expand()

    @ipfsOp
    async def _expand(self, ipfsop, path=''):
        """
        Perform a JSON-LD expansion on the DID document

        Returns the first expanded node, or None on error.
        """
        try:
            async with ipfsop.ldOps() as ld:
                expanded = await ld.dagExpandAggressive(self.dagIpfsPath)

            if isinstance(expanded, list) and len(expanded) > 0:
                return expanded[0]
        except Exception as err:
            self.message('Error expanding DID document: {}'.format(
                str(err)))

    @ipfsOp
    async def expandService(self, ipfsop, srvId):
        """
        Return the expanded JSON-LD node of the service with this DID
        URL id, or None.
        """
        try:
            expanded = await self.expand()
            if not expanded:
                return None

            for srv in expanded.get('https://w3id.org/did#service', []):
                if srv.get('@id') == srvId:
                    return srv
        except Exception:
            # Best-effort: expansion problems yield None
            pass

    @ipfsOp
    async def compactService(self, ipfsop, srvId):
        """
        TODO: optimize, this is gonna be called often

        Caching is the lazy option
        """
        try:
            compact = await self.compact()
            for srv in compact['service']:
                if srv.get('id') == srvId:
                    return srv

            return None
        except Exception:
            pass

    async def update(self, obj: dict, publish=False):
        """
        Merge obj into the DID document and store the new version
        """
        self._document.update(obj)
        await self.updateDocument(self.doc, publish=publish)

    async def flush(self):
        # Store and publish the current in-memory document
        await self.updateDocument(self.doc, publish=True)

    async def servicePropagate(self, service: IPService):
        """
        Emit sServiceAvailable for this service
        """
        log.debug(f'Propagating IP service: {service}')
        await self.sServiceAvailable.emit(self, service)

    async def addServiceRaw(self, service: dict, publish=True):
        """
        Append a raw service node to the DID document.

        :raises IPIDServiceException: if a service with this id exists
        :return: the IPService instance for the new node
        """
        sid = service.get('id')
        assert isinstance(sid, str)
        didEx = didExplode(sid)
        assert didEx is not None

        if await self.searchServiceById(sid) is not None:
            raise IPIDServiceException(
                'An IP service already exists with this ID')

        self._document['service'].append(service)
        await self.updateDocument(self.doc, publish=publish)
        await self.sServicesChanged.emit()
        return self._serviceInst(service)

    @ipfsOp
    async def addServiceContexted(self, ipfsop, service: dict,
                                  endpoint=None,
                                  publish=True,
                                  contextInline=False,
                                  context='IpfsObjectEndpoint'):
        """
        Append a service whose endpoint carries a JSON-LD context.

        :param dict endpoint: extra serviceEndpoint attributes
        :param bool contextInline: inline the context document instead
            of referencing it by ips:// URL
        :raises IPIDServiceException: if a service with this id exists
        :return: the IPService instance for the new node
        """
        sid = service.get('id')
        assert isinstance(sid, str)
        didEx = didExplode(sid)
        assert didEx is not None

        if await self.searchServiceById(sid) is not None:
            raise IPIDServiceException(
                'An IP service already exists with this ID')

        if contextInline is True:
            service['serviceEndpoint'] = {
                '@context': await ipfsop.ldContext(context)
            }
        else:
            service['serviceEndpoint'] = {
                '@context': f'ips://galacteek.ld/{context}',
                '@type': context
            }

        if isinstance(endpoint, dict):
            service['serviceEndpoint'].update(endpoint)

        self._document['service'].append(service)
        await self.updateDocument(self.doc, publish=publish)
        await self.sServicesChanged.emit()

        sInst = self._serviceInst(service)
        await self.servicePropagate(sInst)
        return sInst

    @ipfsOp
    async def addServiceCollection(self, ipfsop, name):
        """
        Add an (empty) objects collection service with this name
        """
        return await self.addServiceContexted({
            'id': self.didUrl(
                path=posixpath.join('/collections', name)
            ),
            'type': IPService.SRV_TYPE_COLLECTION,
        }, context='ObjectsCollectionEndpoint',
            endpoint={
                'name': name,
                'created': utcDatetimeIso(),
                'objects': []
        }, publish=True)

    @ipfsOp
    async def addServiceRendezVous(self, ipfsop):
        """
        Add the PubSub rendezvous P2P service
        """
        serviceName = 'ps-rendezvous'

        return await self.addServiceRaw({
            'id': self.didUrl(path=f'/{serviceName}'),
            'type': IPService.SRV_TYPE_PSRENDEZVOUS,
            'description': 'PubSub rendezvous',
            'serviceEndpoint': ipfsop.p2pEndpoint(serviceName)
        })

    @ipfsOp
    async def addServiceVideoCall(self, ipfsop, roomName):
        """
        Add a video call service for the given room name
        """
        servicePath = posixpath.join('videocall', roomName)

        return await self.addServiceContexted({
            'id': self.didUrl(path=servicePath),
            'type': IPService.SRV_TYPE_VIDEOCALL,
        }, context='DwebVideoCallServiceEndpoint',
            endpoint={
                'roomName': roomName,
                'created': utcDatetimeIso(),
                'p2pEndpoint': ipfsop.p2pEndpoint(servicePath)
        }, publish=True)

    @ipfsOp
    async def updateDocument(self, ipfsop, document, publish=False):
        """
        Update the document and set the 'previous' IPLD link
        """
        now = normedUtcDate()
        self._document = document

        if self.docCid:
            # Link to the previous version of the document
            self._document['previous'] = {
                '/': self.docCid
            }

        self._document['updated'] = now

        cid = await ipfsop.dagPut(document)
        if cid:
            self.docCid = cid

            if publish:
                # Fire-and-forget IPNS publish
                ensure(self.publish())

            await self.sChanged.emit(cid)
            await self.rdfPush()
        else:
            self.message('Could not inject new DID document!')

    async def rdfPush(self):
        # Push that to the RDF store
        await runningApp().s.rdfStore(IPFSPath(self.docCid))

    @ipfsOp
    async def resolve(self, ipfsop, resolveTimeout=None):
        """
        Resolve the IPNS key backing this DID (config-driven caching)
        """
        resolveTimeout = resolveTimeout if resolveTimeout else \
            cGet('resolve.timeout')

        if self.local:
            hours = cGet('resolve.cacheLifetime.local.hours')
        else:
            hours = cGet('resolve.cacheLifetime.default.hours')

        maxLifetime = hours * 3600
        useCache = 'always'
        cache = 'always'

        self.message('DID resolve: {did} (using cache: {usecache})'.format(
            did=self.ipnsKey, usecache=useCache))

        return await ipfsop.nameResolveStreamFirst(
            joinIpns(self.ipnsKey),
            count=1,
            timeout=resolveTimeout,
            useCache=useCache,
            cache=cache,
            cacheOrigin='ipidmanager',
            maxCacheLifetime=maxLifetime
        )

    async def refresh(self):
        """
        Reload the DID document if the last resolve is stale
        (local identities always reload)
        """
        staleValue = cGet('resolve.staleAfterDelay')
        last = self._lastResolve

        if self.local or not last or (loopTime() - last) > staleValue:
            self.message('Reloading')
            return await self.load()

    @ipfsOp
    async def load(self, ipfsop, pin=True, initialCid=None,
                   resolveTimeout=30):
        """
        Load the DID document, either from an initial CID or by
        resolving the IPNS key.

        :rtype: bool (True if a newer document was loaded)
        """
        if not initialCid:
            # Fix: resolveTimeout was previously ignored here
            resolved = await self.resolve(resolveTimeout=resolveTimeout)

            if not resolved:
                self.message('Failed to resolve ?')
                return False

            dagCid = stripIpfs(resolved['Path'])
        else:
            self.message('Loading from initial CID: {}'.format(initialCid))
            dagCid = initialCid

        self.message('DID resolves to {}'.format(dagCid))

        if self.docCid == dagCid:
            # We already have this one
            self.message('DID document already at latest iteration')
            return False

        self._lastResolve = loopTime()

        if pin is True:
            await ipfsop.ctx.pin(dagCid, qname='ipid')

        self.message('Load: IPNS key resolved to {}'.format(dagCid))

        doc = await ipfsop.dagGet(dagCid)

        if doc:
            self._document = doc
            self._latestModified = doc.get('modified')
            self.docCid = dagCid
            await self.sChanged.emit(dagCid)

            if self.local:
                # Local IPID: propagate did services
                async for service in self.discoverServices():
                    await self.sServiceAvailable.emit(self, service)

            # Graph it
            await self.rdfPush()
            return True

        return False

    @ipfsOp
    async def publish(self, ipfsop, timeout=None):
        """
        Publish the DID document to the IPNS key

        We always cache the record so that your DID is always
        resolvable whatever the connection state.

        :rtype: bool
        """
        # Get config settings
        timeout = timeout if timeout else cGet('publish.ipns.timeout')
        autoRepublish = cGet('publish.autoRepublish')
        republishDelay = cGet('publish.autoRepublishDelay')
        ipnsLifetime = cGet('publish.ipns.lifetime')
        ipnsTtl = cGet('publish.ipns.ttl')

        if not self.docCid:
            return False

        if autoRepublish is True:
            # Auto republish for local IPIDs
            ensureLater(
                republishDelay,
                self.publish
            )

        try:
            if await ipfsop.publish(self.docCid,
                                    key=self.ipnsKey,
                                    lifetime=ipnsLifetime,
                                    ttl=ipnsTtl,
                                    cache='always',
                                    cacheOrigin='ipidmanager',
                                    timeout=timeout):
                self.message('Published !')
                self.message(
                    'Published IPID {did} with docCid: {docCid}'.format(
                        did=self.did, docCid=self.docCid))
                return True
            else:
                self.message('Error publishing IPID with DID: {did}'.format(
                    did=self.did))
                return False
        except Exception as e:
            self.message(str(e))
            return False

    @ipfsOp
    async def dagGet(self, ipfsop, path):
        """
        DAG-get a path inside the current DID document
        (None if there is no document or on DAG error)
        """
        if self.docCid:
            dPath = posixpath.join(self.docCid, path)
            self.message('DID docget: {}'.format(dPath))

            try:
                return await ipfsop.dagGet(dPath)
            except Exception as err:
                self.message('DAG get error for {p}: {err}'.format(
                    p=dPath, err=str(err)))
        else:
            self.message('DAG get impossible (no DID document yet)')

    async def pubKeys(self):
        """
        Async generator that yields each publicKey
        """
        # dagGet can return None on error: yield nothing in that case
        for pKey in (await self.dagGet('publicKey')) or []:
            yield pKey

    async def pubKeyWithId(self, id: str):
        """
        Get first publicKey that matches the id

        :param str id: PublicKey id
        """
        async for pKey in self.pubKeys():
            if pKey.get('id') == id:
                return pKey

    async def pubKeyGet(self, idx=0):
        """
        Returns the publicKey node with the idx index in the array

        :param int idx: Public key index
        :rtype: dict
        """
        return await self.dagGet('publicKey/{}'.format(idx))

    async def pubKeyPemGet(self, idx=0):
        """
        Returns the publicKey PEM with the idx index in the array

        :rtype: str
        """
        return await self.dagGet('publicKey/{}/publicKeyPem'.format(idx))

    async def pubKeyPemGetWithId(self, keyId):
        """
        Returns the publicKey PEM whose id matches keyId

        :rtype: str
        """
        for key in (await self.dagGet('publicKey')) or []:
            if key.get('id') == keyId:
                return key['publicKeyPem']

    async def getServices(self):
        """
        Return the document's service nodes (empty list if none)
        """
        node = await self.dagGet('service')
        return node if node else []

    def _serviceInst(self, srv):
        # Instantiate the registered IPService subclass for this type
        stype = srv.get('type')

        for cname, sclass in IPServiceRegistry.IPSREGISTRY.items():
            if stype in sclass.forTypes:
                return sclass(srv, self)

    async def discoverServices(self):
        # Yield an IPService instance for each known service node
        for srv in await self.getServices():
            _inst = self._serviceInst(srv)
            if _inst:
                yield _inst

    async def getServiceByType(self, srvType: str):
        """
        Get first service found that matches the service type

        :param str srvType: Service type
        """
        for srv in await self.getServices():
            if srv.get('type') == srvType:
                return IPService(srv, self)

    async def searchServices(self, query: str):
        # Yield services whose id matches the regex query
        for srvNode in await self.getServices():
            service = IPService(srvNode, self)

            if re.search(query, service.id, re.IGNORECASE):
                yield service

    async def searchServiceById(self, _id: str):
        """
        Return the IPService with this exact DID URL id, or None
        """
        for srvNode in await self.getServices():
            if srvNode['id'] == _id:
                _inst = self._serviceInst(srvNode)
                if _inst:
                    return _inst

            await asyncio.sleep(0)

    async def removeServiceById(self, _id: str):
        """
        Remove the service with the given DID identifier
        """
        try:
            services = self._document['service']

            # Collect matches first: removing from the list while
            # iterating it would skip elements
            matching = [srv for srv in services if srv['id'] == _id]

            for srv in matching:
                services.remove(srv)

            if matching:
                # Flush once after all removals
                await self.flush()
        except Exception as err:
            log.debug(str(err))

    async def avatarService(self):
        # The avatar service, if any
        avatarServiceId = self.didUrl(path='/avatar')
        return await self.searchServiceById(avatarServiceId)

    async def avatarSet(self, ipfsPath):
        """
        Set (create or update) the avatar service's endpoint
        """
        avatarServiceId = self.didUrl(path='/avatar')

        if not await self.avatarService():
            await self.addServiceRaw({
                'id': avatarServiceId,
                'type': IPService.SRV_TYPE_AVATAR,
                'serviceEndpoint': str(ipfsPath),
                'description': 'User Avatar'
            }, publish=True)
        else:
            async with self.editService(avatarServiceId) as editor:
                editor.service['serviceEndpoint'] = str(ipfsPath)

    @async_enterable
    async def editService(self, _id: str, sync=True):
        """
        Edit the IP service with the given ID

        Returns an async context which flushes the DID document
        by default on leaving the context
        """
        for srv in self._document['service']:
            if srv['id'] == _id:
                return IPServiceEditor(self, srv, sync=sync)

    async def _stopP2PServices(self):
        for srvName, srv in self._p2pServices.items():
            await srv.stop()

    async def _stop(self):
        await self._stopP2PServices()

    async def upgrade(self):
        """
        Upgrade a local identity (create the dweb passport if missing)
        """
        if not self.local:
            return

        ps = await self.searchServiceById(
            self.didUrl(path='/passport'))

        if not ps:
            # Create a dweb passport
            await passport.create(self)

    def __eq__(self, other):
        return self.did == other.did

    def __repr__(self):
        return self.did

    def __str__(self):
        return 'IP Identifier: {did}'.format(did=self.did)
class IPIdentifier(DAGOperations):
    """
    InterPlanetary IDentifier (decentralized identity)

    This tries to follow the IPID spec as much as possible.

    NOTE(review): this class bears the same name as the IPIdentifier
    defined earlier in this file and therefore shadows it at import
    time — confirm which definition is meant to survive.
    """

    def __init__(self, did, localId=False, ldCache=None):
        """
        :param str did: the DID (did:ipid:...)
        :param bool localId: True if we own this identity's keys
        :param ldCache: optional LRU cache for JSON-LD expansions
        """
        self._did = did
        self._document = {}
        self._docCid = None
        self._localId = localId
        self._lastResolve = None
        self._latestModified = None

        # JSON-LD expanded cache
        self.cache = ldCache if ldCache else LRUCache(4)

        # Async sigs
        self.sChanged = AsyncSignal(str)
        self.sServicesChanged = AsyncSignal()
        self.sChanged.connectTo(self.onDidChanged)

    @property
    def local(self):
        return self._localId is True

    @property
    def dagCid(self):
        return self.docCid

    @property
    def docCid(self):
        # CID of the current DID document
        return self._docCid

    @docCid.setter
    def docCid(self, cid):
        # NOTE(review): unlike the other IPIdentifier class, the CID is
        # not validated here before assignment — confirm intent
        self.message('Updating DIDDoc CID from {prev} to {n}'.format(
            prev=self._docCid, n=cid))
        self._docCid = cid

    @property
    def doc(self):
        return self._document

    @property
    def did(self):
        return self._did

    @property
    def id(self):
        return self.ipnsKey

    @property
    def ipnsKey(self):
        # The IPNS key is the method-specific id of a did:ipid DID
        exploded = didExplode(self.did)
        if exploded and exploded['method'] == 'ipid':
            return exploded['id']

    @property
    def latestModified(self):
        return self._latestModified

    def contextUrl(self, path, fragment=None):
        """
        Build an ipschema:// URL referencing a JSON-LD context
        """
        return str(
            URL.build(host='galacteek.ld.contexts',
                      scheme='ipschema',
                      path='/' + path,
                      fragment=fragment))

    def message(self, msg, level='debug'):
        # Log with the DID and locality (local/net) as context
        getattr(log, level)('IPID ({loc}) ({did}): {msg}'.format(
            did=self.did, msg=msg, loc='local' if self._localId else 'net'))

    def didFragment(self, frag):
        return self.didUrl(fragment=frag)

    def didUrl(self, params=None, path=None, query=None, fragment=None):
        """
        Build a DID URL with the given params
        """
        url = '{did}'.format(did=self.did)

        if isinstance(params, dict):
            p = ['{0}={1}'.format(k, str(v)) for k, v in params.items()]
            url += ';' + ';'.join(p)

        if isinstance(path, str):
            url += path

        if isinstance(query, dict):
            url += '?' + urlencode(query)

        if isinstance(fragment, str):
            if not fragment.startswith('#'):
                url += '#' + fragment
            else:
                url += fragment

        return url

    def dump(self):
        """
        Dump the DID document to stdout
        """
        print(json.dumps(self.doc, indent=4))

    async def onDidChanged(self, cid):
        # The document changed: invalidate the JSON-LD expansion cache
        cacheKey = repr(self)

        if cacheKey in self.cache:
            # Reset expanded cache
            self.message('LRU cache reset')
            del self.cache[cacheKey]

    @ipfsOp
    async def inline(self, ipfsop, path=''):
        # In-line the JSON-LD contexts in the DAG for JSON-LD usage
        return await ipfsop.ldInline(await self.get(path=path))

    @ipfsOp
    async def compact(self, ipfsop):
        # Not implemented in this (older) class version
        pass

    @amlrucache
    async def expand(self):
        # Cached wrapper around _expand()
        return await self._expand()

    @ipfsOp
    async def _expand(self, ipfsop, path=''):
        """
        Perform a JSON-LD expansion on the DID document
        """
        try:
            expanded = await jsonld.expand(await self.inline(path=path), {
                'documentLoader': await aioipfs_document_loader(
                    ipfsop.client)
            })

            if isinstance(expanded, list) and len(expanded) > 0:
                return expanded[0]
        except Exception as err:
            self.message('Error expanding DID document: {}'.format(str(err)))

    @ipfsOp
    async def expandService(self, ipfsop, srvId):
        """
        Return the expanded JSON-LD node of the service with this DID
        URL id, or None.
        """
        try:
            expanded = await self.expand()
            if not expanded:
                return None

            for srv in expanded.get('https://w3id.org/did#service', []):
                if srv.get('@id') == srvId:
                    return srv
        except Exception:
            # Best-effort: expansion problems yield None
            pass

    async def update(self, obj: dict, publish=False):
        """
        Merge obj into the DID document and store the new version
        """
        self._document.update(obj)
        await self.updateDocument(self.doc, publish=publish)

    async def flush(self):
        # Store and publish the current in-memory document
        await self.updateDocument(self.doc, publish=True)

    async def addServiceRaw(self, service: dict, publish=True):
        """
        Append a raw service node to the DID document.

        :raises IPIDServiceException: if a service with this id exists
        """
        sid = service.get('id')
        assert isinstance(sid, str)
        didEx = didExplode(sid)
        assert didEx is not None

        if await self.searchServiceById(sid) is not None:
            raise IPIDServiceException(
                'An IP service already exists with this ID')

        self._document['service'].append(service)
        await self.updateDocument(self.doc, publish=publish)
        await self.sServicesChanged.emit()

    @ipfsOp
    async def addServiceContexted(self, ipfsop, service: dict,
                                  endpoint=None,
                                  publish=True,
                                  context='IpfsObjectEndpoint'):
        """
        Append a service whose endpoint carries a JSON-LD context.

        :raises IPIDServiceException: if a service with this id exists
        """
        sid = service.get('id')
        assert isinstance(sid, str)
        didEx = didExplode(sid)
        assert didEx is not None

        if await self.searchServiceById(sid) is not None:
            raise IPIDServiceException(
                'An IP service already exists with this ID')

        srvCtx = await ipfsop.ldContext(context)

        # NOTE(review): if ldContext() returns nothing, 'serviceEndpoint'
        # is never created and the update below would raise KeyError —
        # confirm whether that path can occur
        if srvCtx:
            service['serviceEndpoint'] = {'@context': srvCtx}

        if isinstance(endpoint, dict):
            service['serviceEndpoint'].update(endpoint)

        self._document['service'].append(service)
        await self.updateDocument(self.doc, publish=publish)
        await self.sServicesChanged.emit()
        return self._serviceInst(service)

    @ipfsOp
    async def addServiceCollection(self, ipfsop, name):
        """
        Add an (empty) objects collection service with this name
        """
        return await self.addServiceContexted(
            {
                'id': self.didUrl(path=os.path.join('/collections', name)),
                'type': IPService.SRV_TYPE_COLLECTION,
            },
            context='ObjectsCollectionEndpoint',
            endpoint={
                'name': name,
                'created': utcDatetimeIso(),
                'objects': []
            }, publish=True)

    @ipfsOp
    async def updateDocument(self, ipfsop, document, publish=False):
        """
        Update the document and set the 'previous' IPLD link
        """
        now = normedUtcDate()
        self._document = document

        if self.docCid:
            # Link to the previous version of the document
            self._document['previous'] = {'/': self.docCid}

        self._document['updated'] = now

        cid = await ipfsop.dagPut(document)
        if cid:
            self.docCid = cid

            if publish:
                # Fire-and-forget IPNS publish
                ensure(self.publish())

            await self.sChanged.emit(cid)
        else:
            self.message('Could not inject new DID document!')

    @ipfsOp
    async def resolve(self, ipfsop, resolveTimeout=30):
        """
        Resolve the IPNS key backing this DID
        """
        useCache = 'always' if self.local else 'never'
        maxLifetime = 86400 * 7 if self.local else 60 * 10

        self.message('DID resolve: {did} (using cache: {usecache})'.format(
            did=self.ipnsKey, usecache=useCache))

        return await ipfsop.nameResolveStreamFirst(
            joinIpns(self.ipnsKey),
            timeout=resolveTimeout,
            useCache=useCache,
            maxCacheLifetime=maxLifetime)

    async def refresh(self):
        # Reload if the last resolve is older than one minute
        if not self._lastResolve or \
                (time.time() - self._lastResolve) > 60 * 1:
            self.message('Reloading')
            return await self.load()

    @ipfsOp
    async def load(self, ipfsop, pin=True, initialCid=None,
                   resolveTimeout=30):
        """
        Load the DID document, either from an initial CID or by
        resolving the IPNS key.

        :rtype: bool (True if a newer document was loaded)
        """
        if not initialCid:
            resolved = await self.resolve(resolveTimeout=resolveTimeout)

            if not resolved:
                self.message('Failed to resolve ?')
                return False

            dagCid = stripIpfs(resolved['Path'])
        else:
            self.message('Loading from initial CID: {}'.format(initialCid))
            dagCid = initialCid

        self.message('DID resolves to {}'.format(dagCid))

        if self.docCid == dagCid:
            # We already have this one
            self.message('DID document already at latest iteration')
            return False

        self._lastResolve = time.time()

        if pin is True:
            await ipfsop.ctx.pin(dagCid, qname='ipid')

        self.message('Load: IPNS key resolved to {}'.format(dagCid))

        doc = await ipfsop.dagGet(dagCid)

        if doc:
            self._document = doc
            self._latestModified = doc.get('modified')
            self.docCid = dagCid
            await self.sChanged.emit(dagCid)
            return True

        return False

    @ipfsOp
    async def publish(self, ipfsop, timeout=60 * 5):
        """
        Publish the DID document to the IPNS key

        We always cache the record so that your DID is always
        resolvable whatever the connection state.

        :rtype: bool
        """
        if not self.docCid:
            return False

        try:
            if await ipfsop.publish(self.docCid,
                                    key=self.ipnsKey,
                                    lifetime='96h',
                                    cache='always',
                                    cacheOrigin='ipidmanager',
                                    timeout=timeout):
                self.message('Published !', level='info')
                self.message(
                    'Published IPID {did} with docCid: {docCid}'.format(
                        did=self.did, docCid=self.docCid))
                return True
            else:
                self.message('Error publishing IPID with DID: {did}'.format(
                    did=self.did))
                return False
        except Exception as e:
            self.message(str(e))
            return False

    @ipfsOp
    async def dagGet(self, ipfsop, path):
        """
        DAG-get a path inside the current DID document
        (None if there is no document or on DAG error)
        """
        if self.docCid:
            dPath = os.path.join(self.docCid, path)
            self.message('DID docget: {}'.format(dPath))

            try:
                return await ipfsop.dagGet(dPath)
            except Exception as err:
                self.message('DAG get error for {p}: {err}'.format(
                    p=dPath, err=str(err)))
        else:
            self.message('DAG get impossible (no DID document yet)')

    async def pubKeys(self):
        """
        Async generator that yields each publicKey
        """
        # dagGet can return None on error: yield nothing in that case
        for pKey in (await self.dagGet('publicKey')) or []:
            yield pKey

    async def pubKeyWithId(self, id: str):
        """
        Get first publicKey that matches the id

        :param str id: PublicKey id
        """
        async for pKey in self.pubKeys():
            if pKey.get('id') == id:
                return pKey

    async def pubKeyGet(self, idx=0):
        """
        Returns the publicKey node with the idx index in the array

        :param int idx: Public key index
        :rtype: dict
        """
        return await self.dagGet('publicKey/{}'.format(idx))

    async def pubKeyPemGet(self, idx=0):
        """
        Returns the publicKey PEM with the idx index in the array

        :rtype: str
        """
        return await self.dagGet('publicKey/{}/publicKeyPem'.format(idx))

    async def pubKeyPemGetWithId(self, keyId):
        """
        Returns the publicKey PEM whose id matches keyId

        :rtype: str
        """
        for key in (await self.dagGet('publicKey')) or []:
            if key.get('id') == keyId:
                return key['publicKeyPem']

    async def getServices(self):
        """
        Return the document's service nodes (empty list if none)
        """
        # Guard against a failed dagGet (None), like the sibling class
        node = await self.dagGet('service')
        return node if node else []

    def _serviceInst(self, srv):
        # Instantiate the registered IPService subclass for this type
        stype = srv.get('type')

        for cname, sclass in IPServiceRegistry.IPSREGISTRY.items():
            if stype in sclass.forTypes:
                return sclass(srv, self)

    async def discoverServices(self):
        # Yield an IPService instance for each known service node
        for srv in await self.getServices():
            _inst = self._serviceInst(srv)
            if _inst:
                yield _inst

    async def getServiceByType(self, srvType: str):
        """
        Get first service found that matches the service type

        :param str srvType: Service type
        """
        for srv in await self.getServices():
            if srv.get('type') == srvType:
                return IPService(srv, self)

    async def searchServices(self, query: str):
        # Yield services whose id matches the regex query
        for srvNode in await self.getServices():
            service = IPService(srvNode, self)

            if re.search(query, service.id, re.IGNORECASE):
                yield service

    async def searchServiceById(self, _id: str):
        """
        Return the IPService with this exact DID URL id, or None
        """
        for srvNode in await self.getServices():
            if srvNode['id'] == _id:
                _inst = self._serviceInst(srvNode)
                if _inst:
                    return _inst

            await asyncio.sleep(0)

    async def removeServiceById(self, _id: str):
        """
        Remove the service with the given DID identifier
        """
        try:
            services = self._document['service']

            # Collect matches first: removing from the list while
            # iterating it would skip elements
            matching = [srv for srv in services if srv['id'] == _id]

            for srv in matching:
                services.remove(srv)

            if matching:
                # Flush once after all removals
                await self.flush()
        except Exception as err:
            # Fix: was a leftover debug print(); use the module logger
            log.debug(str(err))

    @async_enterable
    async def editService(self, _id: str, sync=True):
        """
        Edit the IP service with the given ID

        Returns an async context which flushes the DID document
        by default on leaving the context
        """
        for srv in self._document['service']:
            if srv['id'] == _id:
                return IPServiceEditor(self, srv, sync=sync)

    def __eq__(self, other):
        return self.did == other.did

    def __repr__(self):
        return self.did

    def __str__(self):
        return 'IP Identifier: {did}'.format(did=self.did)