def assemble(self, actorKey, metaItem):
    logger.info(f'{actorKey} assembly metaItem : ' + str(metaItem['build']))
    assembly = Article(metaItem['build'])
    renderer = self.getRenderer(assembly.typeKey)
    renderer.apply(actorKey, assembly)
    logger.info(f'renderer {actorKey} is assembled with {renderer.tmpltFile}')
    self.renderer[actorKey] = renderer
def __init__(self, actorKey, jmeta):
    # wrap nested dicts in Article so they share the same attribute-style access
    for key, value in jmeta.items():
        if isinstance(value, dict):
            jmeta[key] = Article(value)
    self.__dict__.update(jmeta)
    self.actorKey = actorKey
    self._cache = {}
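# A rough standalone sketch of the attribute-bag idea the constructor above
# depends on: dict keys become attributes, and nested dicts are wrapped so that
# chained attribute access keeps working. This only illustrates the pattern and
# assumes nothing about the real Article class beyond what is visible here.
class AttrBagSketch:
    def __init__(self, mapping=None):
        for key, value in (mapping or {}).items():
            if isinstance(value, dict):
                value = AttrBagSketch(value)
            setattr(self, key, value)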
def query(self, packet, render='Note'):
    result = self.select(packet)
    if render == 'Note':
        return Note(result)
    elif render == 'Article':
        return Article(result)
    else:
        return result
def get(self, socktype, sockopt=None):
    # use None rather than a mutable {} default for the socket options
    socket = self.socket(socktype, sockopt or {})
    port = socket.bind_to_random_port(self.hostAddr)
    sockAddr = f'{self.hostAddr}:{port}'
    sockware = Article({'socket': socket, 'address': sockAddr})
    return sockware
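# get() above leans on pyzmq's Socket.bind_to_random_port(), which binds the
# socket to an ephemeral port and returns the chosen port number so the full
# address can be rebuilt. A minimal standalone sketch of that call; the REP
# socket type and the tcp://127.0.0.1 host address are illustrative assumptions.
import zmq

def bindRandomSketch(hostAddr='tcp://127.0.0.1'):
    socket = zmq.Context.instance().socket(zmq.REP)
    port = socket.bind_to_random_port(hostAddr)
    return socket, f'{hostAddr}:{port}'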
def __start__(cls, genPacket):
    packet = genPacket.copy()
    for key, value in packet.items():
        if isinstance(value, dict):
            packet[key] = Article(value)
    cls._meta = {}
    cls._meta.update(packet)
    cls._leveldb = LeveldbHash.get()
    return packet
def generate(self):
    jobMeta = self.assemble()
    self.setModuleParms()
    if self.make.build == "nobuild":
        logger.warn(f'make.build condition is negative, aborting {self.moduleName} build ...')
    else:
        self.build(jobMeta)
        self.importModule()
    return Article(jobMeta)
def assemble(self):
    params = {'eventKey': self.eventKey}
    jobMeta = self.query(Article(params))
    self.merge(jobMeta['assembly'])
    self['releaseInfo'] = self.select('gitUser', 'owner', 'product', 'releaseTag')
    gitUser, owner, product, releaseTag = self.releaseInfo
    logger.info(f'release params : {gitUser} {owner} {product} {releaseTag}')
    productBase = f'{owner}-{releaseTag}/{product}'
    logger.info(f'productBase : {productBase}')
    return jobMeta
def apply(self, actorKey, jobMeta):
    serviceActor = jobMeta.pop('serviceActor')
    assemblyA = jobMeta['assembly']
    self.__dict__[actorKey] = Article(assemblyA)
    for microKey, assemblyB in serviceActor.items():
        subKey = f'{actorKey}:{microKey}'
        assemblyB.update(assemblyA)
        logger.info(f'{self.name}, applying {subKey} jobMeta : {assemblyB}')
        self.__dict__[subKey] = ActorBrief(subKey, assemblyB)
def make(cls, brokerId, frontPort=None):
    sockets = []
    frontend = cls.context.socket(zmq.ROUTER)
    sockets.append(frontend)
    requestAddr = cls.bind(frontend, cls.hostAddr, frontPort)
    logger.info(f'broker, requestAddr : {requestAddr}')
    backend = cls.context.socket(zmq.ROUTER)
    sockets.append(backend)
    responseAddr = cls.bind(backend, cls.hostAddr)
    logger.info(f'broker, responseAddr : {responseAddr}')
    return ZmqMessageBroker(brokerId, sockets, requestAddr, responseAddr)
async def runGroup(self, actorGroup, packet):
    logger.info(f'about to runGroup, {packet.actor} ...')
    result = Article({'complete': True, 'failed': True, 'signal': 500})
    try:
        result = await self.executor.run(actorGroup, packet)
        if result.failed:
            logger.info(f'{packet.taskKey}, microservice failed, aborting ...')
    except asyncio.CancelledError:
        logger.warn(f'{packet.taskKey}, microservice task was canceled')
    except Exception as ex:
        logger.error(f'{packet.taskKey}, microservice task errored', exc_info=True)
        raise
async def run(self, actorGroup, packet, **kwargs):
    logger.info(f'### MicroserviceExecutor, about to run {packet.taskKey} ...')
    result = Article({'complete': True, 'failed': False, 'signal': 201})
    futures = {
        self.getTask(actor, packet, taskNum): taskNum
        for taskNum, actor in actorGroup.ordActors
    }
    try:
        done, pending = await asyncio.wait(futures.keys(), return_when=asyncio.FIRST_EXCEPTION)
    except asyncio.CancelledError:
        logger.exception(f'{packet.taskKey}, actorGroup is cancelled')
        result.failed = True
        return result
    # if there are pending tasks, one of the done tasks raised an exception,
    # so cancel whatever is still pending
    for pendingJob in pending:
        pendingJob.cancel()
    # process the done tasks
    for doneJob in done:
        taskNum = futures[doneJob]
        try:
            actorId, actorName = actorGroup.tell(taskNum)
            # result() re-raises the task's exception if it errored
            doneJob.result()
            logger.info(f'{actorName} actor {actorId} is complete')
        except Exception as ex:
            logger.exception(f'{actorName} actor {actorId} errored')
            result.merge({'taskNum': taskNum, 'failed': True})
    return result
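# A self-contained sketch of the asyncio fan-out pattern run() is built on:
# schedule the group as tasks, wait until they all finish or the first one
# raises, cancel whatever is still pending, then harvest results with
# Task.result(), which re-raises a failed task's exception. The worker
# coroutine and payload are illustrative assumptions only.
import asyncio

async def workerSketch(taskNum):
    await asyncio.sleep(0.1)
    return taskNum

async def runAllSketch(taskNums):
    futures = {asyncio.create_task(workerSketch(n)): n for n in taskNums}
    done, pending = await asyncio.wait(futures.keys(), return_when=asyncio.FIRST_EXCEPTION)
    for job in pending:
        job.cancel()
    results = {}
    for job in done:
        taskNum = futures[job]
        try:
            results[taskNum] = job.result()
        except Exception:
            results[taskNum] = None
    return results

# e.g. asyncio.run(runAllSketch([1, 2, 3])) -> {1: 1, 2: 2, 3: 3}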
def getActor(self, packet):
    taskId = packet.taskId
    actorKey, article = self[packet.actor].tell(taskId)
    if article:
        # state machine is promoted
        logger.info(f'{packet.taskKey}, actor is already loaded, {article.body} ...')
        return article
    moduleName, className = self[actorKey].classToken.split(':')
    module = sys.modules.get(moduleName)
    if not module:
        module = importlib.import_module(moduleName)
    actorId = str(uuid.uuid4())
    actor = getattr(module, className).make(self.context, actorId, packet)
    article = Article({'actorId': actorId, 'sockAddr': actor.sockAddr})
    logger.info(f'{packet.taskKey}, loading new actor, {article.body} ...')
    self[actorKey].add(taskId, article)
    self.executor[actorId] = actor
    return article
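# getActor() loads actor classes lazily from a 'module:ClassName' token. A
# minimal sketch of that import-by-name pattern using only the standard
# library; the example classToken value is purely illustrative.
import importlib
import sys

def loadClassSketch(classToken):
    moduleName, className = classToken.split(':')
    module = sys.modules.get(moduleName)
    if module is None:
        module = importlib.import_module(moduleName)
    return getattr(module, className)

# e.g. loadClassSketch('collections:OrderedDict') returns the OrderedDict class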
def make(cls, jobId, peerMeta):
    logger.info(f'making {cls.__name__} for job {jobId} ...')
    peerNote = Article(peerMeta)
    MicroserviceB.arrange(jobId, peerNote)
    return cls(jobId, peerNote)
def make(cls, jobId, dspMeta):
    logger.info(f'{cls.__name__}, making MicroserviceContext for job {jobId} ...')
    dsProfile = Article(dspMeta)
    Microservice.arrange(dsProfile, 'ApiConnector')
    return cls(jobId, dsProfile)
def assemble(self, actorKey, metaItem):
    logger.info(f'{self.name}, initFw {actorKey} assembly : ' + str(metaItem['build']))
    assembly = Article(metaItem['build'])
    self.jmeta[actorKey] = assembly
def getRenderData(self, actorKey):
    keys = ['jobId', 'projectId', 'className', 'moduleName']
    logger.info(f'appActor {actorKey} render data keys : {keys}')
    className = self[actorKey].classToken.split(':')[-1]
    values = [self.jobId, self.projectId, className, actorKey]
    return Article(dict(zip(keys, values)))