def __init__(self, defaultProfile, name=None, otr=False, storageName=None, parent=None):
    """
    Web profile constructor.

    :param defaultProfile: profile object holding the default settings
    :param name: optional human-readable profile name
    :param otr: off-the-record flag (accepted for callers; not read here)
    :param storageName: on-disk storage name; when given, it is passed
        as the first positional argument to the base profile constructor
    :param parent: Qt parent object
    """
    # Persistent profiles take the storage name as first positional arg;
    # without one, the base class is constructed with parent only.
    baseArgs = (storageName,) if storageName else ()
    super(BaseProfile, self).__init__(*baseArgs, parent=parent)

    self.profileName = name
    self.defaults = defaultProfile
    self.app = QApplication.instance()

    self.webScripts = self.scripts()
    self.webSettings = self.settings()

    self.installIpfsSchemeHandlers()
    self.installScripts()

    # Route download requests to the application's downloads manager
    self.downloadRequested.connect(
        self.app.downloadsManager.onDownloadRequested)

    # Listen to ps key: dweb.inter
    self.psListen(makeKeyService('dweb', 'inter'))
def __init__(self, dataPath: Path = None, runtimeConfig=None):
    """
    Service constructor.

    :param dataPath: root filesystem path for this service's data;
        when omitted, a path is obtained from the application
        via dataPathForService('tmp')
    :param runtimeConfig: runtime configuration object
    """
    super().__init__()

    self.app = runningApp()
    self.rtCfg = runtimeConfig

    # Fall back to the app-provided 'tmp' service path when no
    # explicit data path was given
    self.rootPath = dataPath if dataPath else self.app.dataPathForService('tmp')

    self.psKey = makeKeyService(self.name)
class RPSStatusButton(GMediumToolButton, KeyListener):
    """
    Listens to RPS status messages from the pinning service
    """

    # Pubsub key(s) this listener subscribes to
    psListenKeys = [makeKeyService('core', 'pinning')]

    # NOTE(review): class-level mutable dict — shared across all
    # instances of this button; confirm a single instance is expected
    rpsStatus = {}
    lock = None

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        # Guards concurrent updates of rpsStatus
        self.lock = asyncio.Lock()
        self.setToolTip('No RPS status yet')

    async def event_g_services_core_pinning(self, key, message):
        """
        Handle a pubsub event from the core pinning service.

        Dispatches on event type: status summaries update the button's
        tooltip, pinning notifications raise a system tray message.
        """
        from galacteek.services.core.pinning.events import RPSEvents

        event = message['event']

        if event['type'] == RPSEvents.ServiceStatusSummary:
            status = event['serviceStatus']
            await self.rpsStatusProcess(status)
        elif event['type'] == RPSEvents.RPSPinningHappening:
            name = event['Name']
            self.app.systemTrayMessage(
                'Remote Pinning',
                f'The content named <b>{name}</b> '
                'is being pinned')

    async def rpsStatusProcess(self, status):
        """
        Record the latest status summary for one pinning service and
        refresh the tooltip.

        Expects a dict with at least 'Service' and 'Stat' keys
        (presumably the go-ipfs remote pinning status schema — verify
        against the pinning service).
        """
        service = status['Service']
        pinCount = status['Stat'].get('PinCount')

        # Nothing to show without pin counters
        if not pinCount:
            return

        items = status.get('Items')
        itemsSummary = None

        if items:
            itemsSummary = await self.rpsPinItemsSummary(items)

        async with self.lock:
            data = self.rpsStatus.setdefault(service, {})
            # Copy so later mutations of the incoming dict can't
            # affect our recorded snapshot
            data['pinCount'] = pinCount.copy()
            # Loop timestamp of the last update, used for staleness
            data['ltLast'] = loopTime()

            await self.updateStatus(itemsSummary)

    async def rpsPinItemsSummary(self, items: list):
        """
        Build an HTML fragment listing the items currently pinning.
        """
        summary = ''
        for item in items:
            status = item.get('Status')

            if status == RPS_S_PINNING.lower():
                summary += '<p>PINNING</p>'

        return summary

    async def updateStatus(self, itemsSummary):
        """
        Rebuild the tooltip from the per-service status snapshots.

        :param itemsSummary: HTML fragment for in-progress items
            (currently not included in the tooltip)
        """
        tooltip = ''
        now = loopTime()

        for service, data in self.rpsStatus.items():
            pinCount = data['pinCount']

            # Services not heard from in the last 600 seconds are
            # reported as inactive
            if now - data['ltLast'] > 600:
                # ignore
                tooltip += f'<div>Service {service} inactive</div>'
                continue

            queued = pinCount.get(RPS_S_QUEUED)
            pinned = pinCount.get(RPS_S_PINNED)
            pinning = pinCount.get(RPS_S_PINNING)
            failed = pinCount.get(RPS_S_FAILED)

            if isinstance(pinning, int) and pinning > 0:
                # NOTE(review): 'iRpcStatusPinning' breaks the 'iRps*'
                # naming of every other helper used here — confirm this
                # is the actual function name and not a typo
                status = iRpcStatusPinning()
            elif isinstance(pinned, int) and pinned > 0:
                status = iRpsStatusPinnedObjCount(pinned)
            else:
                status = iRpsStatusOk()

            if failed and failed > 0:
                status += iRpsStatusSomeFail()

            tooltip += iRpsStatusSummary(service, status, pinned,
                                         pinning, queued, failed)

        self.setToolTip(tooltip)
def psKey(self):
    """
    Pubsub service key built from this object's dotted path
    (each dot-separated component becomes one key argument).
    """
    return makeKeyService(*self.dotPath.split('.'))
class RDFStoresService(GService):
    """
    RDF graph stores service.

    Manages a set of named RDF graphs backed by a SQLAlchemy rdflib
    store, answers 'DagRdfStorageRequest' pubsub events by converting
    IPFS DAG objects to RDF triples, and merges graphs advertised by
    peers over the RDF bazaar pubsub service.
    """

    name = 'graphs'
    rdfIdent = URIRef('g')
    psListenKeys = [makeKeyService('ld')]

    @property
    def graphG(self):
        # The main graph (configured under the name 'c0')
        return self._graphs['c0']

    @property
    def historyService(self):
        return GService.byDotName.get('ld.rdf.graphs.history')

    def on_init(self):
        # Maps graph name -> IGraph instance (filled in on_start)
        self._graphs = {}

    def graphByUri(self, uri):
        """
        Return the registered graph whose identifier matches uri,
        or None if no graph has that identifier.
        """
        for graph in self._graphs.values():
            if str(graph.identifier) == str(uri):
                return graph
        return None

    async def on_start(self):
        """
        Create the stores directory, open the SQLAlchemy store and
        open (or create) every graph listed in the service config.
        """
        await super().on_start()

        self.storesPath = self.rootPath.joinpath('stores').joinpath('devel')
        self.storesPath.mkdir(parents=True, exist_ok=True)

        self.store = plugin.get("SQLAlchemy", Store)(identifier=self.rdfIdent)

        for cfg in self.serviceConfig.graphs:
            graph = IGraph(
                cfg.name,
                self.storesPath.joinpath(f'igraph_{cfg.name}'),
                self.store,
                identifier=cfg.uri
            )
            graph.open(graph.dbUri, create=True)

            # XXX: NS bind
            graph.iNsBind()

            graph.sCidChanged.connectTo(self.onGraphCidChanged)
            self._graphs[cfg.name] = graph

    async def declareIpfsComponents(self):
        """
        Start the RDF bazaar pubsub service for graph exchange.
        """
        self.psService = pubsub_graphs.RDFBazaarService(
            self.app.ipfsCtx,
            scheduler=self.app.scheduler,
            igraphs=self._graphs
        )
        self.psService.sExch.connectTo(self.onNewExchange)
        await self.ipfsPubsubService(self.psService)

    async def on_stop(self):
        log.debug('RDF stores: closing')

        # Best-effort close: the store may already be gone
        try:
            self.graphG.close()
        except Exception:
            pass
        else:
            log.debug('RDF graph closed')

    @ipfsOp
    async def event_g_services_ld(self, ipfsop, key, message):
        """
        Handle pubsub events on the 'ld' key.

        'DagRdfStorageRequest' events carry an IPFS path to convert to
        RDF and an optional list of destination graph IRIs.
        """
        event = message['event']

        if self.disabled:
            return

        if event['type'] == 'DagRdfStorageRequest':
            path = IPFSPath(event['ipfsPath'])
            graphsIris = event.get('outputGraphs', [self.graphG.identifier])

            if path.valid:
                result = await self.storeObject(ipfsop, path,
                                                graphs=graphsIris)
                if result is True:
                    await self.historyService.trace(path, graphsIris)

    async def storeObject(self, ipfsop, path: IPFSPath, graphs=None):
        """
        Convert the DAG object at path to RDF and store the triples
        in the destination graphs.

        :param graphs: list of destination graph IRIs; defaults to the
            main graph's identifier
        :return: True when the object was converted, False otherwise
        """
        if isinstance(graphs, list):
            dst = graphs
        else:
            dst = [self.graphG.identifier]

        async with ipfsop.ldOps() as ld:
            objGraph = await ld.dagAsRdf(path)

            if not objGraph:
                return False

            ttl = io.BytesIO()
            objGraph.serialize(ttl, 'ttl')

            for uri in dst:
                destGraph = self.graphByUri(uri)
                if destGraph is None:
                    continue

                # Rewind before each parse: a BytesIO is exhausted
                # after one read, so without this only the first
                # destination graph would receive any triples
                ttl.seek(0, 0)

                log.debug(f'Storing {path} in graph: {uri}')
                try:
                    destGraph.parse(ttl)
                except Exception:
                    # Skip graphs that fail to parse; keep going
                    continue

        return True

    def ttlSerialize(self):
        """
        Return the main graph serialized as ordered Turtle (str).
        """
        ttl = io.BytesIO()
        serializer = otsrdflib.OrderedTurtleSerializer(self.graphG)
        serializer.serialize(ttl)
        ttl.seek(0, 0)
        return ttl.getvalue().decode()

    async def gQuery(self, query, initBindings=None):
        """
        Run a SPARQL query against the main graph in an executor
        thread.

        :param initBindings: initial variable bindings, forwarded to
            rdflib (previously accepted but silently dropped)
        :return: the rdflib query result, or None on error
        """
        def runQuery():
            return self.graphG.query(query, initBindings=initBindings)

        try:
            return await self.app.loop.run_in_executor(None, runQuery)
        except Exception as err:
            log.debug(f'gQuery error: {err}')
            return None

    async def onGraphCidChanged(self, name, cid):
        # Graph exchange on CID change is disabled for now
        return

        msg = RDFGraphsExchangeMessage.make(self._graphs)
        await self.psService.send(msg)

    async def onNewExchange(self, eMsg: RDFGraphsExchangeMessage):
        """
        Merge the graphs advertised by a peer, by export CID.
        """
        for gd in eMsg.graphs:
            uri = gd['graphUri']
            cid = gd['graphExportCid']

            localG = self.graphByUri(uri)
            if localG is None:
                # No local graph registered with this URI: ignore
                continue

            log.debug(f'Merging CID {cid} into graph: {uri}')
            await localG.mergeFromCid(cid)

    @GService.task
    async def ttlDumpTask(self):
        """
        Debug task: periodically export the main graph as Turtle.
        Only runs when the app is in debug mode.
        """
        if not self.app.debugEnabled:
            return

        while not self.should_stop:
            await asyncio.sleep(30)
            await self.graphG.exportTtl()
def __init__(self, stack):
    """
    Messenger workspace constructor.

    :param stack: the workspaces stack this workspace is added to
    """
    wsIcon = getIcon('dmessenger/dmessenger.png')
    super().__init__(stack, WS_DMESSENGER,
                     icon=wsIcon,
                     description='Messenger')

    # React to bitmessage service events (ps key: net.bitmessage)
    self.psListen(makeKeyService('net', 'bitmessage'))
async def rdfStore(self, ipfsPath: IPFSPath):
    """
    Ask the RDF service (listening on the 'ld' pubsub key) to store
    the object referenced by ipfsPath in the RDF store.
    """
    request = {
        'type': 'DagRdfStorageRequest',
        'ipfsPath': str(ipfsPath)
    }
    await self.ldPublish(request, key=makeKeyService('ld'))